Compare commits

..

151 Commits

Author SHA1 Message Date
opencode-agent[bot]
7ea9394adc Apply PR #25034: feat: default HTTP API backend to on for dev/beta channels 2026-05-02 22:27:49 +00:00
opencode-agent[bot]
8676280c2e Apply PR #24512: Refactor v2 session events as schemas 2026-05-02 22:27:48 +00:00
opencode-agent[bot]
a0b704c310 Apply PR #24229: fix: lazy session error schema 2026-05-02 22:27:03 +00:00
opencode-agent[bot]
cf9b414b2b Apply PR #22753: core: move plugin initialisation to config layer override 2026-05-02 22:27:03 +00:00
opencode-agent[bot]
5615efedfc Apply PR #21537: fix(app): remove pierre diff virtualization 2026-05-02 22:25:41 +00:00
opencode-agent[bot]
c3edd9df7a Apply PR #20039: feat: bash->shell tool + pwsh/powershell/cmd/bash specific tool definitions so agents work better 2026-05-02 22:24:50 +00:00
opencode-agent[bot]
d6b85af12c Apply PR #19067: ci: only build electron desktop 2026-05-02 22:24:50 +00:00
opencode-agent[bot]
b8216feb08 Apply PR #12633: feat(tui): add auto-accept mode for permission requests 2026-05-02 22:24:49 +00:00
LukeParkerDev
ac5c1e0b33 . 2026-05-03 08:20:48 +10:00
opencode-agent[bot]
aabbe63de2 Apply PR #11710: feat: Add the ability to include cleared prompts in the history, toggled by a KV-persisted command palette item (resolves #11489) 2026-05-02 22:19:39 +00:00
Kit Langton
1986a6e817 refactor(cli): convert session subcommands to effectCmd (#25483) 2026-05-02 18:15:28 -04:00
LukeParkerDev
9e0379f8f6 Merge remote-tracking branch 'upstream/dev' into refactor-shells 2026-05-03 08:12:28 +10:00
opencode-agent[bot]
dfe1325fca chore: generate 2026-05-02 22:02:14 +00:00
Kit Langton
c1686c6ddc refactor(cli): convert stats command to effectCmd (#25474) 2026-05-02 18:01:06 -04:00
Kit Langton
79b6ce5db4 refactor(cli): convert import command to effectCmd (#25467) 2026-05-02 21:56:32 +00:00
Kit Langton
0c816eb4b1 refactor(cli): convert plugin command to effectCmd (#25473) 2026-05-02 17:55:13 -04:00
Kit Langton
e318e173d8 refactor(cli): convert export command to effectCmd (#25471) 2026-05-02 17:45:41 -04:00
opencode-agent[bot]
b314781a1a chore: generate 2026-05-02 21:02:46 +00:00
Kit Langton
8396d6b016 refactor(cli): convert pr command to effectCmd (#25465) 2026-05-02 17:01:46 -04:00
Dax Raad
93c91cefaa fix 2026-05-02 16:14:06 -04:00
Dax Raad
41cbcad5ca sync 2026-05-02 16:13:28 -04:00
Dax Raad
bd7248d29f sync 2026-05-02 16:13:24 -04:00
Dax Raad
105cd15c74 tui: show shell command execution and collapsible tool outputs
Users can now see when shell commands are running in the session view
with real-time output display. Long tool outputs are now collapsible
with click-to-expand/collapse interaction to keep the interface clean
when dealing with verbose command results.
2026-05-02 16:13:24 -04:00
Dax Raad
ea0d6fc1eb tui: close open dialogs when navigating to a new session to prevent UI state from lingering 2026-05-02 16:13:24 -04:00
Dax Raad
78a1084db2 core: add unique IDs to all events for reliable tracking and debugging
Events now include unique identifiers at the payload level, making it easier to trace event flow through the system and debug issues. Session events have been restructured so IDs are consistent across the event bus, database projections, and API responses.

Sessions now persist which agent and model were used, preserving this context in session history. Agent and model switches are now tracked as dedicated message types in sessions, providing a clearer timeline of how the conversation evolved.
2026-05-02 16:13:23 -04:00
Dax Raad
ee7b34b709 core: track agent and model used in each session
Store the active agent and model in the session table so users can see which configuration was used when browsing session history. This helps identify sessions that used specific agents or models for easier filtering and organization.
2026-05-02 16:13:17 -04:00
Dax Raad
4e3a90a16b core: simplify Session.Info schema to empty struct for flexible event handling
This change removes the predefined fields from Session.Info to allow more
dynamic event-driven session data. Instead of fixed schema fields, session
information will be populated through the event system, enabling better
support for evolving session states without schema migrations.

The empty struct serves as a base that can be extended through the event
model, making it easier to add new session attributes without modifying
core schema definitions.
2026-05-02 16:13:17 -04:00
Dax Raad
c7ab272037 core: expose complete session metadata schema for agent session introspection 2026-05-02 16:13:17 -04:00
Dax Raad
48bf20f133 core: add session listing API with filtering and improved pagination
Users can now browse sessions through the new /api/session endpoint with filters for directory, workspace, date range, and title search. Pagination cursors are now labeled 'previous' and 'next' instead of 'before' and 'after' to make navigation direction clearer. Both session lists and message history now support explicit 'asc' or 'desc' ordering so users can choose between newest-first or oldest-first views. The TUI session view now displays messages with the newest at the bottom, matching standard chat interfaces.
2026-05-02 16:13:17 -04:00
Dax Raad
715bba1d07 core: add pagination support for session messages with cursor-based navigation
Enables loading messages in chunks for better performance with long conversations.
Users can now navigate through large session histories without loading all messages at once.
Includes before/after cursors for bi-directional pagination.
2026-05-02 16:13:17 -04:00
Dax Raad
09194e06e2 core: simplify message history pagination with unified cursor API
Replace separate before/after query parameters with a single cursor that
carries direction info. Chat clients can now use 'start' or 'end' keywords
to jump to the beginning or newest messages, and navigate history with a
single cursor parameter instead of managing multiple pagination states.
2026-05-02 16:13:17 -04:00
Dax Raad
69cea948ba refactor(session): define v2 session event schemas 2026-05-02 16:13:17 -04:00
Brendan Allan
7423b4872c rename desktop-darwin to desktop-mac 2026-05-01 11:54:56 +08:00
Brendan Allan
9b85d2cbb4 Merge branch 'dev' into brendan/lazy-init-plugins 2026-05-01 11:48:58 +08:00
Brendan Allan
4a86c2b77a Merge branch 'dev' into brendan/lazy-init-plugins 2026-05-01 11:47:50 +08:00
Kit Langton
1e5cc6da19 feat: default HTTP API backend to on for dev/beta channels
Turn on the experimental effect-httpapi server backend by default for
dev, beta, and local installations so internal users exercise the new
backend. Stable (prod/latest) installs remain on the legacy hono backend.

OPENCODE_EXPERIMENTAL_HTTPAPI=true/1 still force-enables on stable, and
OPENCODE_EXPERIMENTAL_HTTPAPI=false/0 disables it as an escape hatch for
dev/beta users.
2026-04-29 21:35:48 -04:00
Brendan Allan
92d44ce72e create git token after downloading artifacts 2026-04-29 14:25:36 +08:00
LukeParkerDev
529a6ed10f . 2026-04-29 13:00:39 +10:00
Brendan Allan
a3cb00a1ab try improve 2026-04-29 10:17:54 +08:00
LukeParkerDev
20c3461a80 f 2026-04-29 10:10:58 +10:00
LukeParkerDev
f8687190f2 . 2026-04-29 09:47:58 +10:00
LukeParkerDev
d5ebfad838 . 2026-04-29 09:34:28 +10:00
LukeParkerDev
c16a0e08ae Merge remote-tracking branch 'upstream/dev' into refactor-shells 2026-04-29 09:25:48 +10:00
LukeParkerDev
70ca5727a6 noooooooooo breaking changes 2026-04-29 09:06:05 +10:00
LukeParkerDev
9d9830b7df no breaking changes 2026-04-29 08:51:06 +10:00
Brendan Allan
6a7b415894 read signature from sig file 2026-04-28 19:29:39 +08:00
Brendan Allan
682c4eecd6 don't do tag lookup istg 2026-04-28 15:41:05 +08:00
Brendan Allan
9fbff1bc7d Merge branch 'dev' into brendan/desktop-electron-only 2026-04-28 14:05:53 +08:00
Brendan Allan
dced9c9baa lookup release by id not name 2026-04-28 14:03:15 +08:00
Brendan Allan
55e7bb08d0 remove build-tauri 2026-04-28 12:31:53 +08:00
Brendan Allan
6620054fe1 fix error 2026-04-28 12:28:09 +08:00
Brendan Allan
db830c636b rename opencode-election-* to opencode-desktop-* 2026-04-28 12:28:09 +08:00
Brendan Allan
04c03fa612 ci: only build electron desktop 2026-04-28 12:28:09 +08:00
LukeParkerDev
6ac33ddc4d test: update experimental api shell assertions 2026-04-27 14:30:41 +10:00
LukeParkerDev
ea277baeb7 css 2026-04-27 14:23:37 +10:00
LukeParkerDev
b1d9c57655 Merge remote-tracking branch 'upstream/dev' into refactor-shells 2026-04-27 14:15:07 +10:00
LukeParkerDev
5a7e69b325 Merge remote-tracking branch 'upstream/dev' into refactor-shells 2026-04-27 08:59:39 +10:00
LukeParkerDev
344dab3839 Update next.test.ts 2026-04-26 09:59:46 +10:00
LukeParkerDev
9dde86acbe Merge remote-tracking branch 'upstream/dev' into refactor-shells 2026-04-26 09:56:21 +10:00
Luke Parker
73ee7ae702 Merge branch 'dev' into refactor-shells 2026-04-25 15:23:49 +10:00
LukeParkerDev
2051cadcb8 Update prompt.ts 2026-04-25 15:18:30 +10:00
LukeParkerDev
790d181d8a slight accuracy 2026-04-25 11:37:32 +10:00
LukeParkerDev
ecac4c4e2a split prompt/definition from logic 2026-04-25 11:32:18 +10:00
LukeParkerDev
f89955a4e3 Merge remote-tracking branch 'upstream/dev' into refactor-shells 2026-04-25 11:18:01 +10:00
LukeParkerDev
428b0c46a7 cmd 2026-04-25 11:15:31 +10:00
LukeParkerDev
341b8e78c9 perms 2026-04-25 11:11:42 +10:00
LukeParkerDev
d704110e52 fix: lazy session error schema 2026-04-25 10:04:49 +10:00
Ariane Emory
09e4e5a184 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-04-23 21:55:13 -04:00
LukeParkerDev
4f8ff6ab53 . 2026-04-24 08:23:18 +10:00
LukeParkerDev
7266b48ca0 Merge remote-tracking branch 'upstream/dev' into refactor-shells 2026-04-24 08:11:09 +10:00
LukeParkerDev
26d77add77 edges 2026-04-24 08:03:16 +10:00
LukeParkerDev
cffb8eb1e3 . 2026-04-24 07:54:08 +10:00
LukeParkerDev
0d500a735f Create todo.spec.ts 2026-04-24 07:44:06 +10:00
LukeParkerDev
6d66973fd5 clean 2026-04-24 07:39:19 +10:00
LukeParkerDev
3e30068907 refactor: make shell the canonical tool internals 2026-04-23 19:46:00 +10:00
LukeParkerDev
b75f831eaa . 2026-04-23 17:34:57 +10:00
LukeParkerDev
f9a633bd0b Merge remote-tracking branch 'upstream/dev' into refactor-shells 2026-04-23 17:31:07 +10:00
Brendan Allan
e041605b40 Merge branch 'dev' into brendan/lazy-init-plugins 2026-04-21 12:33:02 +08:00
LukeParkerDev
f280e7e69c fix: defer MessageV2.Assistant.shape access to break circular dep in compiled binary 2026-04-20 16:08:42 +10:00
Brendan Allan
b265742fd0 Merge branch 'dev' into brendan/lazy-init-plugins 2026-04-19 21:15:45 +08:00
Brendan Allan
b1db69fdf7 fix other commands 2026-04-17 17:03:53 +08:00
Brendan Allan
031766efa0 fix tui 2026-04-17 15:44:01 +08:00
Brendan Allan
dc6d39551c address feedback 2026-04-17 15:44:01 +08:00
Brendan Allan
e287569f82 rename layer 2026-04-17 15:44:01 +08:00
Brendan Allan
14eacb4019 core: move plugin initialisation to config layer override 2026-04-17 15:44:01 +08:00
Ariane Emory
731c1e58f2 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-04-16 20:22:02 -04:00
Ariane Emory
c411d37484 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-04-12 04:22:06 -04:00
Adam
cb29742b57 fix(app): remove pierre diff virtualization 2026-04-08 13:16:45 -05:00
LukeParkerDev
ee0884ad31 fix(shell): preserve legacy bash compatibility
Keep mixed shell/bash permission configs ordered correctly and treat --tools bash as the legacy alias during agent creation.
2026-04-08 15:14:45 +10:00
LukeParkerDev
f1547de528 ok 2026-04-08 14:35:16 +10:00
LukeParkerDev
39088e1a1e Merge remote-tracking branch 'upstream/dev' into refactor-shells
# Conflicts:
#	packages/app/e2e/prompt/prompt-shell.spec.ts
#	packages/opencode/src/tool/bash.ts
#	packages/opencode/src/tool/registry.ts
2026-04-08 13:11:43 +10:00
Ariane Emory
97a94571a4 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-04-03 09:19:12 -04:00
LukeParkerDev
25551172c9 fix(shell): avoid abort hangs and utf8 corruption
Attach shell process listeners before handling already-aborted tool signals so canceled runs always settle, and decode shell output as UTF-8 to preserve multibyte characters across chunk boundaries. Also lazy-load shell-specific parsers and hoist command sets so parsing work stays focused on the active shell.
2026-04-03 16:04:41 +10:00
LukeParkerDev
32ec3666b7 fix(shell): keep shell config consistent
Treat shell access as one logical toggle during agent creation and apply bash compatibility rules before explicit per-shell overrides. This avoids disabling the active Windows shell unexpectedly and keeps pwsh and powershell overrides deterministic.
2026-04-03 15:08:30 +10:00
LukeParkerDev
2eb9ae4d34 refactor(shell): centralize shell tool identity
Move shell tool ID checks behind shared helpers so runtime code and tests stop duplicating bash, pwsh, and powershell branches. This keeps shell-specific behavior aligned across consumers and makes follow-on shell changes less error-prone.
2026-04-03 14:56:40 +10:00
LukeParkerDev
baf476f431 test(shell): handle nullable exit metadata
Make the shell exit assertions typecheck cleanly while keeping the PowerShell regression coverage. Remove the accidentally committed .opencode package-lock so generated state does not ship in the branch.
2026-04-03 14:29:04 +10:00
LukeParkerDev
23e77fd9bc fix(shell): preserve powershell exit codes
Use a multiline PowerShell trailer so native Windows commands keep their actual exit status without masking cmdlet failures, and add focused regression coverage. Remove the accidentally committed .opencode package-lock to keep generated state out of the branch.
2026-04-03 14:27:03 +10:00
LukeParkerDev
6ad6358eb1 fix: render pwsh and powershell tools correctly in UI
This fixes regressions from splitting the shell tools where powershell commands were missing their native exit codes and their correct UI rendering.
2026-04-03 14:01:13 +10:00
LukeParkerDev
95577c75a3 fix(config): preserve bash permission compatibility
Keep legacy tools.bash migration mapped to the single bash permission since the permission layer already expands it to pwsh and powershell. This preserves the backward-compatible config shape while retaining shell compatibility.
2026-04-03 13:43:37 +10:00
LukeParkerDev
f21bf4a62a Merge remote-tracking branch 'upstream/dev' into refactor-shells 2026-04-03 13:34:16 +10:00
LukeParkerDev
676519d79d refactor: apply positive guidance and parameterize shell commands in prompt template 2026-03-30 20:42:42 +10:00
LukeParkerDev
48f9082d0a refactor: use positive tone in shell guidance prompts 2026-03-30 20:24:49 +10:00
LukeParkerDev
51ebba2975 refactor: add shell-specific guidance to each tool prompt 2026-03-30 20:18:50 +10:00
LukeParkerDev
3e26c3ae83 refactor: extract shell tool factory to eliminate duplication 2026-03-30 20:15:58 +10:00
LukeParkerDev
67dfbcbcfd fix: use dynamic imports for tree-sitter and shell-aware metadata tags 2026-03-30 20:12:36 +10:00
LukeParkerDev
048ac63abd refactor: split monolithic bash tool into separate bash/pwsh/powershell tools 2026-03-30 20:08:27 +10:00
Ariane Emory
6652585a7f Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-24 11:17:40 -04:00
Ariane Emory
532b64c0d5 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-24 07:43:03 -04:00
Ariane Emory
eec4c775a7 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-23 21:10:21 -04:00
Ariane Emory
01e350449c Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-20 19:12:18 -04:00
Dax
5792a80a8c Merge branch 'dev' into feat/auto-accept-permissions 2026-03-20 10:46:31 -04:00
Dax Raad
db039db7f5 regen js sdk 2026-03-20 10:21:10 -04:00
Dax Raad
c1a3936b61 Merge remote-tracking branch 'origin/dev' into feat/auto-accept-permissions
# Conflicts:
#	packages/sdk/js/src/v2/gen/types.gen.ts
2026-03-20 10:20:26 -04:00
Ariane Emory
a9d9e4d9c4 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-20 03:35:16 -04:00
Ariane Emory
2531b2d3a9 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-13 11:47:39 -04:00
Ariane Emory
a718f86e0f Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-08 19:28:41 -04:00
Ariane Emory
f3efdff861 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-08 08:36:02 -04:00
Ariane Emory
955d8591df Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-05 18:24:19 -05:00
Ariane Emory
33b3388bf4 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-26 17:50:11 -05:00
Ariane Emory
716f40b128 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-26 01:36:39 -05:00
Ariane Emory
0b06ff1407 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-20 21:24:12 -05:00
Ariane Emory
01ff5b5390 Merge branch 'dev' into feat/canceled-prompts-in-history
# Conflicts:
#	packages/opencode/src/cli/cmd/tui/component/prompt/history.tsx
2026-02-20 02:16:02 -05:00
Ariane Emory
3d1b121e70 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-19 19:18:48 -05:00
Ariane Emory
b70629af27 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-18 19:10:26 -05:00
Ariane Emory
b7b016fa28 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-17 00:09:51 -05:00
Ariane Emory
5ba2d7e5f0 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-15 12:27:51 -05:00
Ariane Emory
459b22b83d Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-14 19:21:47 -05:00
Ariane Emory
377812b98a Merge dev into feat/canceled-prompts-in-history 2026-02-14 06:28:48 -05:00
Ariane Emory
5cc0901e38 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-13 09:37:11 -05:00
Ariane Emory
7fb6b589d1 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-12 18:29:23 -05:00
Ariane Emory
3f37b43e7d Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-11 12:46:47 -05:00
Ariane Emory
8805dfc849 fix: deduplicate prompt history entries
Avoid adding duplicate entries to prompt history when the same input
is appended multiple times (e.g., clearing with ctrl+c then restoring
via history navigation and clearing again).
2026-02-10 22:21:39 -05:00
Ariane Emory
ac5a5d8b16 Merge branch 'feat/canceled-prompts-in-history' of github.com:ariane-emory/opencode into feat/canceled-prompts-in-history 2026-02-10 16:37:55 -05:00
Ariane Emory
eaf94ed047 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-10 16:29:05 -05:00
Ariane Emory
b8031c5ae8 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-10 16:10:35 -05:00
Dax Raad
a531f3f36d core: run command build agent now auto-accepts file edits to reduce workflow interruptions while still requiring confirmation for bash commands 2026-02-07 20:00:09 -05:00
Dax Raad
bb3382311d tui: standardize autoedit indicator text styling to match other status labels 2026-02-07 19:57:45 -05:00
Dax Raad
ad545d0cc9 tui: allow auto-accepting only edit permissions instead of all permissions 2026-02-07 19:52:53 -05:00
Dax Raad
ac244b1458 tui: add searchable 'toggle' keywords to command palette and show current state in toggle titles 2026-02-07 17:03:34 -05:00
Dax Raad
f202536b65 tui: show enable/disable state in permission toggle and make it searchable by 'toggle permissions' 2026-02-07 16:57:48 -05:00
Dax Raad
405cc3f610 tui: streamline permission toggle command naming and add keyboard shortcut support
Rename 'Toggle autoaccept permissions' to 'Toggle permissions' for clarity
and move the command to the Agent category for better discoverability.
Add permission_auto_accept_toggle keybind to enable keyboard shortcut
toggling of auto-accept mode for permission requests.
2026-02-07 16:51:55 -05:00
Dax Raad
878c1b8c2d feat(tui): add auto-accept mode for permission requests
Add a toggleable auto-accept mode that automatically accepts all incoming
permission requests with a 'once' reply. This is useful for users who want
to streamline their workflow when they trust the agent's actions.

Changes:
- Add permission_auto_accept keybind (default: shift+tab) to config
- Remove default for agent_cycle_reverse (was shift+tab)
- Add auto-accept logic in sync.tsx to auto-reply when enabled
- Add command bar action to toggle auto-accept mode (copy: "Toggle autoaccept permissions")
- Add visual indicator showing 'auto-accept' when active
- Store auto-accept state in KV for persistence across sessions
2026-02-07 16:44:39 -05:00
Ariane Emory
d5dcadc000 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-07 13:34:42 -05:00
Ariane Emory
0c154e6a2f Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-06 15:59:50 -05:00
Ariane Emory
4f96975148 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-05 18:17:01 -05:00
Ariane Emory
eaba99711b Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-04 19:33:59 -05:00
Ariane Emory
f762125775 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-03 18:36:44 -05:00
Ariane Emory
ded6bb6513 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-02 21:23:28 -05:00
Ariane Emory
39332f5be6 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-01 22:33:29 -05:00
Ariane Emory
2c6ff35400 feat: add toggle to control whether cleared prompts are saved to history
Adds a toggle command in the System category that allows users to enable
or disable saving cleared prompts to history. The feature is disabled by
default to preserve existing behavior.

When enabled via the command palette ("Include cleared prompts in history"),
pressing Ctrl+C will save the current prompt to history before clearing it,
allowing users to navigate back with arrow keys.

The setting persists in kv.json.
2026-02-01 21:12:48 -05:00
Ariane Emory
738d6c8899 feat: save prompt to history when cleared with Ctrl+C
When users press Ctrl+C to clear the input field, the current prompt
is now saved to history before clearing. This allows users to navigate
back to cleared prompts using arrow keys, preventing loss of work.

Addresses #11489
2026-02-01 21:01:15 -05:00
130 changed files with 10336 additions and 3976 deletions

View File

@@ -209,182 +209,6 @@ jobs:
packages/opencode/dist/opencode-windows-x64
packages/opencode/dist/opencode-windows-x64-baseline
build-tauri:
needs:
- build-cli
- version
continue-on-error: false
env:
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
AZURE_TRUSTED_SIGNING_ACCOUNT_NAME: ${{ secrets.AZURE_TRUSTED_SIGNING_ACCOUNT_NAME }}
AZURE_TRUSTED_SIGNING_CERTIFICATE_PROFILE: ${{ secrets.AZURE_TRUSTED_SIGNING_CERTIFICATE_PROFILE }}
AZURE_TRUSTED_SIGNING_ENDPOINT: ${{ secrets.AZURE_TRUSTED_SIGNING_ENDPOINT }}
strategy:
fail-fast: false
matrix:
settings:
- host: macos-latest
target: x86_64-apple-darwin
- host: macos-latest
target: aarch64-apple-darwin
# github-hosted: blacksmith lacks ARM64 MSVC cross-compilation toolchain
- host: windows-2025
target: aarch64-pc-windows-msvc
- host: blacksmith-4vcpu-windows-2025
target: x86_64-pc-windows-msvc
- host: blacksmith-4vcpu-ubuntu-2404
target: x86_64-unknown-linux-gnu
- host: blacksmith-8vcpu-ubuntu-2404-arm
target: aarch64-unknown-linux-gnu
runs-on: ${{ matrix.settings.host }}
steps:
- uses: actions/checkout@v3
with:
fetch-tags: true
- uses: apple-actions/import-codesign-certs@v2
if: ${{ runner.os == 'macOS' }}
with:
keychain: build
p12-file-base64: ${{ secrets.APPLE_CERTIFICATE }}
p12-password: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
- name: Verify Certificate
if: ${{ runner.os == 'macOS' }}
run: |
CERT_INFO=$(security find-identity -v -p codesigning build.keychain | grep "Developer ID Application")
CERT_ID=$(echo "$CERT_INFO" | awk -F'"' '{print $2}')
echo "CERT_ID=$CERT_ID" >> $GITHUB_ENV
echo "Certificate imported."
- name: Setup Apple API Key
if: ${{ runner.os == 'macOS' }}
run: |
echo "${{ secrets.APPLE_API_KEY_PATH }}" > $RUNNER_TEMP/apple-api-key.p8
- uses: ./.github/actions/setup-bun
- name: Azure login
if: runner.os == 'Windows'
uses: azure/login@v2
with:
client-id: ${{ env.AZURE_CLIENT_ID }}
tenant-id: ${{ env.AZURE_TENANT_ID }}
subscription-id: ${{ env.AZURE_SUBSCRIPTION_ID }}
- uses: actions/setup-node@v4
with:
node-version: "24"
- name: Cache apt packages
if: contains(matrix.settings.host, 'ubuntu')
uses: actions/cache@v4
with:
path: ~/apt-cache
key: ${{ runner.os }}-${{ matrix.settings.target }}-apt-${{ hashFiles('.github/workflows/publish.yml') }}
restore-keys: |
${{ runner.os }}-${{ matrix.settings.target }}-apt-
- name: install dependencies (ubuntu only)
if: contains(matrix.settings.host, 'ubuntu')
run: |
mkdir -p ~/apt-cache && chmod -R a+rw ~/apt-cache
sudo apt-get update
sudo apt-get install -y --no-install-recommends -o dir::cache::archives="$HOME/apt-cache" libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf
sudo chmod -R a+rw ~/apt-cache
- name: install Rust stable
uses: dtolnay/rust-toolchain@stable
with:
targets: ${{ matrix.settings.target }}
- uses: Swatinem/rust-cache@v2
with:
workspaces: packages/desktop/src-tauri
shared-key: ${{ matrix.settings.target }}
- name: Prepare
run: |
cd packages/desktop
bun ./scripts/prepare.ts
env:
OPENCODE_VERSION: ${{ needs.version.outputs.version }}
GITHUB_TOKEN: ${{ steps.committer.outputs.token }}
OPENCODE_CLI_ARTIFACT: ${{ (runner.os == 'Windows' && 'opencode-cli-windows') || 'opencode-cli' }}
RUST_TARGET: ${{ matrix.settings.target }}
GH_TOKEN: ${{ github.token }}
GITHUB_RUN_ID: ${{ github.run_id }}
- name: Resolve tauri portable SHA
if: contains(matrix.settings.host, 'ubuntu')
run: echo "TAURI_PORTABLE_SHA=$(git ls-remote https://github.com/tauri-apps/tauri.git refs/heads/feat/truly-portable-appimage | cut -f1)" >> "$GITHUB_ENV"
# Fixes AppImage build issues, can be removed when https://github.com/tauri-apps/tauri/pull/12491 is released
- name: Install tauri-cli from portable appimage branch
uses: taiki-e/cache-cargo-install-action@v3
if: contains(matrix.settings.host, 'ubuntu')
with:
tool: tauri-cli
git: https://github.com/tauri-apps/tauri
# branch: feat/truly-portable-appimage
rev: ${{ env.TAURI_PORTABLE_SHA }}
- name: Show tauri-cli version
if: contains(matrix.settings.host, 'ubuntu')
run: cargo tauri --version
- name: Setup git committer
id: committer
uses: ./.github/actions/setup-git-committer
with:
opencode-app-id: ${{ vars.OPENCODE_APP_ID }}
opencode-app-secret: ${{ secrets.OPENCODE_APP_SECRET }}
- name: Build and upload artifacts
uses: tauri-apps/tauri-action@390cbe447412ced1303d35abe75287949e43437a
timeout-minutes: 60
with:
projectPath: packages/desktop
uploadWorkflowArtifacts: true
tauriScript: ${{ (contains(matrix.settings.host, 'ubuntu') && 'cargo tauri') || '' }}
args: --target ${{ matrix.settings.target }} --config ${{ (github.ref_name == 'beta' && './src-tauri/tauri.beta.conf.json') || './src-tauri/tauri.prod.conf.json' }} --verbose
updaterJsonPreferNsis: true
releaseId: ${{ needs.version.outputs.release }}
tagName: ${{ needs.version.outputs.tag }}
releaseDraft: true
releaseAssetNamePattern: opencode-desktop-[platform]-[arch][ext]
repo: ${{ (github.ref_name == 'beta' && 'opencode-beta') || '' }}
releaseCommitish: ${{ github.sha }}
env:
GITHUB_TOKEN: ${{ steps.committer.outputs.token }}
TAURI_BUNDLER_NEW_APPIMAGE_FORMAT: true
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
APPLE_SIGNING_IDENTITY: ${{ env.CERT_ID }}
APPLE_API_ISSUER: ${{ secrets.APPLE_API_ISSUER }}
APPLE_API_KEY: ${{ secrets.APPLE_API_KEY }}
APPLE_API_KEY_PATH: ${{ runner.temp }}/apple-api-key.p8
- name: Verify signed Windows desktop artifacts
if: runner.os == 'Windows'
shell: pwsh
run: |
$files = @(
"${{ github.workspace }}\packages\desktop\src-tauri\sidecars\opencode-cli-${{ matrix.settings.target }}.exe"
)
$files += Get-ChildItem "${{ github.workspace }}\packages\desktop\src-tauri\target\${{ matrix.settings.target }}\release\bundle\nsis\*.exe" | Select-Object -ExpandProperty FullName
foreach ($file in $files) {
$sig = Get-AuthenticodeSignature $file
if ($sig.Status -ne "Valid") {
throw "Invalid signature for ${file}: $($sig.Status)"
}
}
build-electron:
needs:
- build-cli
@@ -524,6 +348,30 @@ jobs:
env:
OPENCODE_CHANNEL: ${{ (github.ref_name == 'beta' && 'beta') || 'prod' }}
- name: Create and upload macOS .app.tar.gz
if: runner.os == 'macOS' && needs.version.outputs.release
working-directory: packages/desktop-electron/dist
env:
GH_TOKEN: ${{ steps.committer.outputs.token }}
run: |
if [[ "${{ matrix.settings.target }}" == "x86_64-apple-darwin" ]]; then
APP_DIR="mac"
OUT_NAME="opencode-desktop-mac-x64.app.tar.gz"
elif [[ "${{ matrix.settings.target }}" == "aarch64-apple-darwin" ]]; then
APP_DIR="mac-arm64"
OUT_NAME="opencode-desktop-mac-arm64.app.tar.gz"
else
echo "Unknown macOS target: ${{ matrix.settings.target }}"
exit 1
fi
APP_PATH=$(find "$APP_DIR" -maxdepth 1 -name "*.app" -type d | head -1)
if [ -z "$APP_PATH" ]; then
echo "No .app bundle found in $APP_DIR"
exit 1
fi
tar -czf "$OUT_NAME" -C "$(dirname "$APP_PATH")" "$(basename "$APP_PATH")"
gh release upload "v${{ needs.version.outputs.version }}" "$OUT_NAME" --clobber --repo "${{ needs.version.outputs.repo }}"
- name: Verify signed Windows Electron artifacts
if: runner.os == 'Windows'
shell: pwsh
@@ -542,7 +390,7 @@ jobs:
- uses: actions/upload-artifact@v4
with:
name: opencode-electron-${{ matrix.settings.target }}
name: opencode-desktop-${{ matrix.settings.target }}
path: packages/desktop-electron/dist/*
- uses: actions/upload-artifact@v4
@@ -556,7 +404,6 @@ jobs:
- version
- build-cli
- sign-cli-windows
- build-tauri
- build-electron
if: always() && !failure() && !cancelled()
runs-on: blacksmith-4vcpu-ubuntu-2404
@@ -583,13 +430,6 @@ jobs:
node-version: "24"
registry-url: "https://registry.npmjs.org"
- name: Setup git committer
id: committer
uses: ./.github/actions/setup-git-committer
with:
opencode-app-id: ${{ vars.OPENCODE_APP_ID }}
opencode-app-secret: ${{ secrets.OPENCODE_APP_SECRET }}
- uses: actions/download-artifact@v4
with:
name: opencode-cli
@@ -611,6 +451,13 @@ jobs:
pattern: latest-yml-*
path: /tmp/latest-yml
- name: Setup git committer
id: committer
uses: ./.github/actions/setup-git-committer
with:
opencode-app-id: ${{ vars.OPENCODE_APP_ID }}
opencode-app-secret: ${{ secrets.OPENCODE_APP_SECRET }}
- name: Cache apt packages (AUR)
uses: actions/cache@v4
with:
@@ -639,3 +486,5 @@ jobs:
GH_REPO: ${{ needs.version.outputs.repo }}
NPM_CONFIG_PROVENANCE: false
LATEST_YML_DIR: /tmp/latest-yml
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}

View File

@@ -1,4 +1,5 @@
import { Config } from "effect"
import { InstallationChannel } from "../installation/version"
function truthy(key: string) {
const value = process.env[key]?.toLowerCase()
@@ -10,6 +11,10 @@ function falsy(key: string) {
return value === "false" || value === "0"
}
// Channels that default to the new effect-httpapi server backend. The legacy
// hono backend remains the default for stable (`prod`/`latest`) installs.
const HTTPAPI_DEFAULT_ON_CHANNELS = new Set(["dev", "beta", "local"])
function number(key: string) {
const value = process.env[key]
if (!value) return undefined
@@ -81,8 +86,16 @@ export const Flag = {
OPENCODE_STRICT_CONFIG_DEPS: truthy("OPENCODE_STRICT_CONFIG_DEPS"),
OPENCODE_WORKSPACE_ID: process.env["OPENCODE_WORKSPACE_ID"],
OPENCODE_EXPERIMENTAL_HTTPAPI: truthy("OPENCODE_EXPERIMENTAL_HTTPAPI"),
// Defaults to true on dev/beta/local channels so internal users exercise the
// new effect-httpapi server backend. Stable (`prod`/`latest`) installs stay
// on the legacy hono backend until the rollout is complete. An explicit env
// var ("true"/"1" or "false"/"0") always wins, providing an opt-in for
// stable users and an escape hatch for dev/beta users.
OPENCODE_EXPERIMENTAL_HTTPAPI:
truthy("OPENCODE_EXPERIMENTAL_HTTPAPI") ||
(!falsy("OPENCODE_EXPERIMENTAL_HTTPAPI") && HTTPAPI_DEFAULT_ON_CHANNELS.has(InstallationChannel)),
OPENCODE_EXPERIMENTAL_WORKSPACES: OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_WORKSPACES"),
OPENCODE_EXPERIMENTAL_EVENT_SYSTEM: OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_EVENT_SYSTEM"),
// Evaluated at access time (not module load) because tests, the CLI, and
// external tooling set these env vars at runtime.

View File

@@ -1,3 +1,5 @@
export * as Log from "./log"
import path from "path"
import fs from "fs/promises"
import { createWriteStream } from "fs"

View File

@@ -27,7 +27,7 @@ const channel = (() => {
})()
const getBase = (): Configuration => ({
artifactName: "opencode-electron-${os}-${arch}.${ext}",
artifactName: "opencode-desktop-${os}-${arch}.${ext}",
directories: {
output: "dist",
buildResources: "resources",

View File

@@ -1,7 +1,8 @@
#!/usr/bin/env bun
import { Buffer } from "node:buffer"
import { $ } from "bun"
import path from "node:path"
import { parseArgs } from "node:util"
const { values } = parseArgs({
args: Bun.argv.slice(2),
@@ -12,8 +13,6 @@ const { values } = parseArgs({
const dryRun = values["dry-run"]
import { parseArgs } from "node:util"
const repo = process.env.GH_REPO
if (!repo) throw new Error("GH_REPO is required")
@@ -23,20 +22,22 @@ if (!releaseId) throw new Error("OPENCODE_RELEASE is required")
const version = process.env.OPENCODE_VERSION
if (!version) throw new Error("OPENCODE_VERSION is required")
const dir = process.env.LATEST_YML_DIR
if (!dir) throw new Error("LATEST_YML_DIR is required")
const root = dir
const token = process.env.GH_TOKEN ?? process.env.GITHUB_TOKEN
if (!token) throw new Error("GH_TOKEN or GITHUB_TOKEN is required")
const apiHeaders = {
Authorization: `token ${token}`,
Accept: "application/vnd.github+json",
}
const releaseRes = await fetch(`https://api.github.com/repos/${repo}/releases/${releaseId}`, {
headers: apiHeaders,
const rel = await fetch(`https://api.github.com/repos/${repo}/releases/${releaseId}`, {
headers: {
Authorization: `token ${token}`,
Accept: "application/vnd.github+json",
},
})
if (!releaseRes.ok) {
throw new Error(`Failed to fetch release: ${releaseRes.status} ${releaseRes.statusText}`)
if (!rel.ok) {
throw new Error(`Failed to fetch release: ${rel.status} ${rel.statusText}`)
}
type Asset = {
@@ -45,115 +46,169 @@ type Asset = {
}
type Release = {
tag_name?: string
assets?: Asset[]
}
const release = (await releaseRes.json()) as Release
const assets = release.assets ?? []
const assetByName = new Map(assets.map((asset) => [asset.name, asset]))
const assets = ((await rel.json()) as Release).assets ?? []
const amap = new Map(assets.map((item) => [item.name, item]))
const latestAsset = assetByName.get("latest.json")
if (!latestAsset) {
console.log("latest.json not found, skipping tauri finalization")
process.exit(0)
type Item = {
url: string
}
const latestRes = await fetch(latestAsset.url, {
headers: {
Authorization: `token ${token}`,
Accept: "application/octet-stream",
},
})
if (!latestRes.ok) {
throw new Error(`Failed to fetch latest.json: ${latestRes.status} ${latestRes.statusText}`)
type Yml = {
version: string
files: Item[]
}
const latestText = new TextDecoder().decode(await latestRes.arrayBuffer())
const latest = JSON.parse(latestText)
const base = { ...latest }
delete base.platforms
function parse(text: string): Yml {
const lines = text.split("\n")
let version = ""
const files: Item[] = []
let url = ""
const fetchSignature = async (asset: Asset) => {
const res = await fetch(asset.url, {
const flush = () => {
if (!url) return
files.push({ url })
url = ""
}
for (const line of lines) {
const trim = line.trim()
if (line.startsWith("version:")) {
version = line.slice("version:".length).trim()
continue
}
if (trim.startsWith("- url:")) {
flush()
url = trim.slice("- url:".length).trim()
continue
}
const indented = line.startsWith(" ") || line.startsWith("\t")
if (!indented) flush()
}
flush()
return { version, files }
}
async function read(sub: string, file: string) {
const item = Bun.file(path.join(root, sub, file))
if (!(await item.exists())) return undefined
return parse(await item.text())
}
function pick(list: Item[], exts: string[]) {
  // Return the URL of the first entry whose path portion (query string
  // stripped) ends with one of the given extensions. Extensions are tried
  // in order, so earlier entries in `exts` take priority. Returns undefined
  // when nothing matches.
  for (const ext of exts) {
    const match = list.find((entry) => {
      const pathPart = entry.url.split("?")[0]
      return pathPart?.toLowerCase().endsWith(ext) ?? false
    })
    if (match) return match.url
  }
}
function link(raw: string) {
  // Already-absolute http(s) URLs pass through untouched; bare asset names
  // are resolved against this release's GitHub download path.
  const absolute = /^https?:\/\//.test(raw)
  if (absolute) return raw
  return `https://github.com/${repo}/releases/download/v${version}/${raw}`
}
async function sign(url: string, key: string) {
const name = decodeURIComponent(new URL(url).pathname.split("/").pop() ?? key)
const asset = amap.get(name)
const res = await fetch(asset?.url ?? url, {
headers: {
Authorization: `token ${token}`,
Accept: "application/octet-stream",
...(asset ? { Accept: "application/octet-stream" } : {}),
},
})
if (!res.ok) {
throw new Error(`Failed to fetch signature: ${res.status} ${res.statusText}`)
throw new Error(`Failed to fetch file ${name}: ${res.status} ${res.statusText} (${asset?.url ?? url})`)
}
return Buffer.from(await res.arrayBuffer()).toString()
const tmp = process.env.RUNNER_TEMP ?? "/tmp"
const file = path.join(tmp, name)
await Bun.write(file, await res.arrayBuffer())
await $`bunx @tauri-apps/cli signer sign ${file}`
const sigFile = Bun.file(`${file}.sig`)
if (!(await sigFile.exists())) throw new Error(`Signature file not found for ${name}`)
return (await sigFile.text()).trim()
}
const entries: Record<string, { url: string; signature: string }> = {}
const add = (key: string, asset: Asset, signature: string) => {
if (entries[key]) return
entries[key] = {
url: `https://github.com/${repo}/releases/download/v${version}/${asset.name}`,
signature,
}
const add = async (data: Record<string, { url: string; signature: string }>, key: string, raw: string | undefined) => {
  // Register an updater entry under `key`. Skips missing inputs and keys
  // that were already populated; otherwise resolves the download URL and
  // attaches a freshly generated signature for it.
  if (!raw || data[key]) return
  const url = link(raw)
  const signature = await sign(url, key)
  data[key] = { url, signature }
}
const targets = [
{ key: "linux-x86_64-deb", asset: "opencode-desktop-linux-amd64.deb" },
{ key: "linux-x86_64-rpm", asset: "opencode-desktop-linux-x86_64.rpm" },
{ key: "linux-aarch64-deb", asset: "opencode-desktop-linux-arm64.deb" },
{ key: "linux-aarch64-rpm", asset: "opencode-desktop-linux-aarch64.rpm" },
{ key: "windows-aarch64-nsis", asset: "opencode-desktop-windows-arm64.exe" },
{ key: "windows-x86_64-nsis", asset: "opencode-desktop-windows-x64.exe" },
{ key: "darwin-x86_64-app", asset: "opencode-desktop-darwin-x64.app.tar.gz" },
{
key: "darwin-aarch64-app",
asset: "opencode-desktop-darwin-aarch64.app.tar.gz",
},
]
for (const target of targets) {
const asset = assetByName.get(target.asset)
if (!asset) continue
const sig = assetByName.get(`${target.asset}.sig`)
if (!sig) continue
const signature = await fetchSignature(sig)
add(target.key, asset, signature)
const alias = (data: Record<string, { url: string; signature: string }>, key: string, src: string) => {
  // Point `key` at the entry already registered under `src`. No-ops when
  // the alias already exists or the source entry was never produced.
  const existing = data[src]
  if (!existing || data[key]) return
  data[key] = existing
}
const alias = (key: string, source: string) => {
if (entries[key]) return
const entry = entries[source]
if (!entry) return
entries[key] = entry
}
const winx = await read("latest-yml-x86_64-pc-windows-msvc", "latest.yml")
const wina = await read("latest-yml-aarch64-pc-windows-msvc", "latest.yml")
const macx = await read("latest-yml-x86_64-apple-darwin", "latest-mac.yml")
const maca = await read("latest-yml-aarch64-apple-darwin", "latest-mac.yml")
const linx = await read("latest-yml-x86_64-unknown-linux-gnu", "latest-linux.yml")
const lina = await read("latest-yml-aarch64-unknown-linux-gnu", "latest-linux-arm64.yml")
alias("linux-x86_64", "linux-x86_64-deb")
alias("linux-aarch64", "linux-aarch64-deb")
alias("windows-aarch64", "windows-aarch64-nsis")
alias("windows-x86_64", "windows-x86_64-nsis")
alias("darwin-x86_64", "darwin-x86_64-app")
alias("darwin-aarch64", "darwin-aarch64-app")
const yver = winx?.version ?? wina?.version ?? macx?.version ?? maca?.version ?? linx?.version ?? lina?.version
if (yver && yver !== version) throw new Error(`latest.yml version mismatch: expected ${version}, got ${yver}`)
const out: Record<string, { url: string; signature: string }> = {}
const winxexe = pick(winx?.files ?? [], [".exe"])
const winaexe = pick(wina?.files ?? [], [".exe"])
const macxTarGz = "opencode-desktop-mac-x64.app.tar.gz"
const macaTarGz = "opencode-desktop-mac-arm64.app.tar.gz"
const linxDeb = pick(linx?.files ?? [], [".deb"])
const linxRpm = pick(linx?.files ?? [], [".rpm"])
const linxAppImage = pick(linx?.files ?? [], [".appimage"])
const linaDeb = pick(lina?.files ?? [], [".deb"])
const linaRpm = pick(lina?.files ?? [], [".rpm"])
const linaAppImage = pick(lina?.files ?? [], [".appimage"])
await add(out, "windows-x86_64-nsis", winxexe)
await add(out, "windows-aarch64-nsis", winaexe)
await add(out, "darwin-x86_64-app", macxTarGz)
await add(out, "darwin-aarch64-app", macaTarGz)
await add(out, "linux-x86_64-deb", linxDeb)
await add(out, "linux-x86_64-rpm", linxRpm)
await add(out, "linux-x86_64-appimage", linxAppImage)
await add(out, "linux-aarch64-deb", linaDeb)
await add(out, "linux-aarch64-rpm", linaRpm)
await add(out, "linux-aarch64-appimage", linaAppImage)
alias(out, "windows-x86_64", "windows-x86_64-nsis")
alias(out, "windows-aarch64", "windows-aarch64-nsis")
alias(out, "darwin-x86_64", "darwin-x86_64-app")
alias(out, "darwin-aarch64", "darwin-aarch64-app")
alias(out, "linux-x86_64", "linux-x86_64-deb")
alias(out, "linux-aarch64", "linux-aarch64-deb")
const platforms = Object.fromEntries(
Object.keys(entries)
Object.keys(out)
.sort()
.map((key) => [key, entries[key]]),
.map((key) => [key, out[key]]),
)
const output = {
...base,
if (!Object.keys(platforms).length) throw new Error("No updater files found in latest.yml artifacts")
const data = {
version,
notes: "",
pub_date: new Date().toISOString(),
platforms,
}
const dir = process.env.RUNNER_TEMP ?? "/tmp"
const file = `${dir}/latest.json`
await Bun.write(file, JSON.stringify(output, null, 2))
const tmp = process.env.RUNNER_TEMP ?? "/tmp"
const file = path.join(tmp, "latest.json")
await Bun.write(file, JSON.stringify(data, null, 2))
const tag = release.tag_name
if (!tag) throw new Error("Release tag not found")
const tag = `v${version}`
if (dryRun) {
console.log(`dry-run: wrote latest.json for ${tag} to ${file}`)

View File

@@ -0,0 +1,17 @@
-- Replace the old session_entry table with session_message: create the new
-- table, drop the old table's indexes, create equivalent indexes on the new
-- name, then drop the old table.
-- NOTE(review): rows from session_entry are never copied into
-- session_message before the DROP TABLE below, so any existing entries are
-- discarded by this migration — confirm this data loss is intentional
-- (e.g. the data is rebuilt elsewhere or the old table was empty in practice).
CREATE TABLE `session_message` (
	`id` text PRIMARY KEY,
	`session_id` text NOT NULL,
	`type` text NOT NULL,
	`time_created` integer NOT NULL,
	`time_updated` integer NOT NULL,
	`data` text NOT NULL,
	CONSTRAINT `fk_session_message_session_id_session_id_fk` FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
-- Drop the indexes that referenced the old table name.
DROP INDEX IF EXISTS `session_entry_session_idx`;--> statement-breakpoint
DROP INDEX IF EXISTS `session_entry_session_type_idx`;--> statement-breakpoint
DROP INDEX IF EXISTS `session_entry_time_created_idx`;--> statement-breakpoint
-- Recreate the same three lookup indexes against session_message.
CREATE INDEX `session_message_session_idx` ON `session_message` (`session_id`);--> statement-breakpoint
CREATE INDEX `session_message_session_type_idx` ON `session_message` (`session_id`,`type`);--> statement-breakpoint
CREATE INDEX `session_message_time_created_idx` ON `session_message` (`time_created`);--> statement-breakpoint
DROP TABLE `session_entry`;

File diff suppressed because it is too large Load Diff

View File

@@ -2,7 +2,9 @@
"version": "7",
"dialect": "sqlite",
"id": "aaa2ebeb-caa4-478d-8365-4fc595d16856",
"prevIds": ["66cbe0d7-def0-451b-b88a-7608513a9b44"],
"prevIds": [
"61f807f9-6398-4067-be05-804acc2561bc"
],
"ddl": [
{
"name": "account_state",
@@ -37,7 +39,7 @@
"entityType": "tables"
},
{
"name": "session_entry",
"name": "session_message",
"entityType": "tables"
},
{
@@ -598,7 +600,7 @@
"generated": null,
"name": "id",
"entityType": "columns",
"table": "session_entry"
"table": "session_message"
},
{
"type": "text",
@@ -608,7 +610,7 @@
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "session_entry"
"table": "session_message"
},
{
"type": "text",
@@ -618,7 +620,7 @@
"generated": null,
"name": "type",
"entityType": "columns",
"table": "session_entry"
"table": "session_message"
},
{
"type": "integer",
@@ -628,7 +630,7 @@
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "session_entry"
"table": "session_message"
},
{
"type": "integer",
@@ -638,7 +640,7 @@
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "session_entry"
"table": "session_message"
},
{
"type": "text",
@@ -648,7 +650,7 @@
"generated": null,
"name": "data",
"entityType": "columns",
"table": "session_entry"
"table": "session_message"
},
{
"type": "text",
@@ -1051,9 +1053,13 @@
"table": "event"
},
{
"columns": ["active_account_id"],
"columns": [
"active_account_id"
],
"tableTo": "account",
"columnsTo": ["id"],
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "SET NULL",
"nameExplicit": false,
@@ -1062,9 +1068,13 @@
"table": "account_state"
},
{
"columns": ["project_id"],
"columns": [
"project_id"
],
"tableTo": "project",
"columnsTo": ["id"],
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
@@ -1073,9 +1083,13 @@
"table": "workspace"
},
{
"columns": ["session_id"],
"columns": [
"session_id"
],
"tableTo": "session",
"columnsTo": ["id"],
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
@@ -1084,9 +1098,13 @@
"table": "message"
},
{
"columns": ["message_id"],
"columns": [
"message_id"
],
"tableTo": "message",
"columnsTo": ["id"],
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
@@ -1095,9 +1113,13 @@
"table": "part"
},
{
"columns": ["project_id"],
"columns": [
"project_id"
],
"tableTo": "project",
"columnsTo": ["id"],
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
@@ -1106,20 +1128,28 @@
"table": "permission"
},
{
"columns": ["session_id"],
"columns": [
"session_id"
],
"tableTo": "session",
"columnsTo": ["id"],
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_session_entry_session_id_session_id_fk",
"name": "fk_session_message_session_id_session_id_fk",
"entityType": "fks",
"table": "session_entry"
"table": "session_message"
},
{
"columns": ["project_id"],
"columns": [
"project_id"
],
"tableTo": "project",
"columnsTo": ["id"],
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
@@ -1128,9 +1158,13 @@
"table": "session"
},
{
"columns": ["session_id"],
"columns": [
"session_id"
],
"tableTo": "session",
"columnsTo": ["id"],
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
@@ -1139,9 +1173,13 @@
"table": "todo"
},
{
"columns": ["session_id"],
"columns": [
"session_id"
],
"tableTo": "session",
"columnsTo": ["id"],
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
@@ -1150,9 +1188,13 @@
"table": "session_share"
},
{
"columns": ["aggregate_id"],
"columns": [
"aggregate_id"
],
"tableTo": "event_sequence",
"columnsTo": ["aggregate_id"],
"columnsTo": [
"aggregate_id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
@@ -1161,98 +1203,128 @@
"table": "event"
},
{
"columns": ["email", "url"],
"columns": [
"email",
"url"
],
"nameExplicit": false,
"name": "control_account_pk",
"entityType": "pks",
"table": "control_account"
},
{
"columns": ["session_id", "position"],
"columns": [
"session_id",
"position"
],
"nameExplicit": false,
"name": "todo_pk",
"entityType": "pks",
"table": "todo"
},
{
"columns": ["id"],
"columns": [
"id"
],
"nameExplicit": false,
"name": "account_state_pk",
"table": "account_state",
"entityType": "pks"
},
{
"columns": ["id"],
"columns": [
"id"
],
"nameExplicit": false,
"name": "account_pk",
"table": "account",
"entityType": "pks"
},
{
"columns": ["id"],
"columns": [
"id"
],
"nameExplicit": false,
"name": "workspace_pk",
"table": "workspace",
"entityType": "pks"
},
{
"columns": ["id"],
"columns": [
"id"
],
"nameExplicit": false,
"name": "project_pk",
"table": "project",
"entityType": "pks"
},
{
"columns": ["id"],
"columns": [
"id"
],
"nameExplicit": false,
"name": "message_pk",
"table": "message",
"entityType": "pks"
},
{
"columns": ["id"],
"columns": [
"id"
],
"nameExplicit": false,
"name": "part_pk",
"table": "part",
"entityType": "pks"
},
{
"columns": ["project_id"],
"columns": [
"project_id"
],
"nameExplicit": false,
"name": "permission_pk",
"table": "permission",
"entityType": "pks"
},
{
"columns": ["id"],
"columns": [
"id"
],
"nameExplicit": false,
"name": "session_entry_pk",
"table": "session_entry",
"name": "session_message_pk",
"table": "session_message",
"entityType": "pks"
},
{
"columns": ["id"],
"columns": [
"id"
],
"nameExplicit": false,
"name": "session_pk",
"table": "session",
"entityType": "pks"
},
{
"columns": ["session_id"],
"columns": [
"session_id"
],
"nameExplicit": false,
"name": "session_share_pk",
"table": "session_share",
"entityType": "pks"
},
{
"columns": ["aggregate_id"],
"columns": [
"aggregate_id"
],
"nameExplicit": false,
"name": "event_sequence_pk",
"table": "event_sequence",
"entityType": "pks"
},
{
"columns": ["id"],
"columns": [
"id"
],
"nameExplicit": false,
"name": "event_pk",
"table": "event",
@@ -1322,9 +1394,9 @@
"isUnique": false,
"where": null,
"origin": "manual",
"name": "session_entry_session_idx",
"name": "session_message_session_idx",
"entityType": "indexes",
"table": "session_entry"
"table": "session_message"
},
{
"columns": [
@@ -1340,9 +1412,9 @@
"isUnique": false,
"where": null,
"origin": "manual",
"name": "session_entry_session_type_idx",
"name": "session_message_session_type_idx",
"entityType": "indexes",
"table": "session_entry"
"table": "session_message"
},
{
"columns": [
@@ -1354,9 +1426,9 @@
"isUnique": false,
"where": null,
"origin": "manual",
"name": "session_entry_time_created_idx",
"name": "session_message_time_created_idx",
"entityType": "indexes",
"table": "session_entry"
"table": "session_message"
},
{
"columns": [

View File

@@ -0,0 +1,2 @@
-- Add nullable `agent` and `model` columns to session.
-- NOTE(review): presumably these record which agent and model the session
-- uses; both are nullable text so existing rows remain valid — confirm
-- against the session schema and its write path.
ALTER TABLE `session` ADD `agent` text;--> statement-breakpoint
ALTER TABLE `session` ADD `model` text;

File diff suppressed because it is too large Load Diff

View File

@@ -1,342 +0,0 @@
# Config Lifecycle Plan
## Goal
Remove instance disposal from `Config` so config loading/writing stays a pure config concern and runtime lifecycle invalidation happens at the caller/orchestration boundary.
This specifically removes the need for `Config` to import or lazily import `InstanceRuntime`.
## Current Coupling
`src/config/config.ts` currently does three separate things:
1. Load and cache global config.
2. Load, merge, and write project/global config files.
3. Dispose instances when config changes.
The third responsibility is the problem.
Current disposal paths:
1. `Config.update(config)` writes project `config.json`, then disposes the active instance unless `options.dispose === false`.
2. `Config.updateGlobal(config)` writes global config, then calls `Config.invalidate()` if the file changed.
3. `Config.invalidate(wait)` invalidates the global config cache, disposes all instances, and emits a global disposed event.
## Desired Ownership
`Config` should own:
1. Reading config files.
2. Parsing and merging config.
3. Writing project/global config files.
4. Invalidating only its own global config cache.
Callers should own:
1. Disposing the current instance after a project config update.
2. Disposing all instances after a global config update or explicit reload.
3. Emitting server/global lifecycle events after disposal.
## Concrete API Changes
### `src/config/config.ts`
1. Remove `loadInstanceRuntime()`.
2. Remove `InstanceRuntime`/`InstanceStore`/lifecycle imports from config.
3. Change `Interface.update` from:
```ts
readonly update: (config: Info, options?: { dispose?: boolean }) => Effect.Effect<void>
```
to:
```ts
readonly update: (config: Info) => Effect.Effect<void>
```
4. Change `Config.update` implementation to only write the project `config.json`.
5. Change `Interface.invalidate` to a config-only cache invalidation method, or rename it for clarity.
Preferred final shape:
```ts
readonly invalidate: () => Effect.Effect<void>
```
`invalidate()` should only run `invalidateGlobal`.
6. Change `Config.updateGlobal` to write global config, invalidate only config-global cache when changed, and return whether the file changed.
Preferred final shape:
```ts
readonly updateGlobal: (config: Info) => Effect.Effect<{ info: Info; changed: boolean }>
```
Implementation detail:
```ts
if (changed) yield* invalidate()
return { info: next, changed }
```
Public API routes should still return only `result.info`; `changed` is for lifecycle orchestration only.
## Caller Updates
### Legacy instance config route
File: `src/server/routes/instance/config.ts`
Current:
```ts
const cfg = yield* Config.Service
yield* cfg.update(config)
return config
```
Change to:
```ts
const cfg = yield* Config.Service
yield* cfg.update(config)
const store = yield* InstanceStore.Service
yield* store.dispose(Instance.current)
return config
```
Imports needed:
```ts
import { Instance } from "@/project/instance"
import { InstanceStore } from "@/project/instance-store"
```
Rationale: this route is an instance-scoped orchestration boundary, so it should own the instance disposal after writing project config.
### Effect HttpApi instance config route
File: `src/server/routes/instance/httpapi/handlers/config.ts`
Current:
```ts
yield* configSvc.update(ctx.payload, { dispose: false })
yield* markInstanceForDisposal(yield* InstanceState.context)
return ctx.payload
```
Change to:
```ts
yield* configSvc.update(ctx.payload)
yield* markInstanceForDisposal(yield* InstanceState.context)
return ctx.payload
```
Rationale: this route already has correct ownership. It writes config first, then delegates disposal to HttpApi lifecycle middleware.
### Legacy global config route
File: `src/server/routes/global.ts`
Current:
```ts
const next = await AppRuntime.runPromise(Config.Service.use((cfg) => cfg.updateGlobal(config)))
return c.json(next)
```
Change to run config write, then if the file changed, schedule the same dispose-all/global-disposed side effect that `Config.invalidate(false)` currently schedules.
Important behavior to preserve:
1. Do not dispose instances when the serialized global config did not change.
2. Do not make the HTTP response wait for instance disposal. Current `updateGlobal -> invalidate()` schedules disposal asynchronously when `wait` is omitted.
Preferred implementation shape:
```ts
const result = await AppRuntime.runPromise(
Effect.gen(function* () {
const cfg = yield* Config.Service
return yield* cfg.updateGlobal(config)
}),
)
if (result.changed) void AppRuntime.runPromise(disposeAllInstancesAndEmitGlobalDisposed).catch(() => undefined)
return c.json(result.info)
```
Imports needed:
```ts
import { Effect } from "effect"
import { disposeAllInstancesAndEmitGlobalDisposed } from "@/server/global-lifecycle"
```
`src/server/routes/global.ts` already defines `GlobalDisposedEvent`; move that event definition to the shared helper module or re-export it from there so `/dispose`, legacy global config update, and HttpApi global config update use one event source.
### Effect HttpApi global config route
File: `src/server/routes/instance/httpapi/handlers/global.ts`
Current:
```ts
return yield* config.updateGlobal(ctx.payload)
```
Change to preserve the existing changed-only and async-disposal semantics:
```ts
const result = yield* config.updateGlobal(ctx.payload)
if (result.changed) bridge.fork(disposeAllInstancesAndEmitGlobalDisposed({ swallowErrors: true }))
return result.info
```
Imports needed:
```ts
import { EffectBridge } from "@/effect/bridge"
import { disposeAllInstancesAndEmitGlobalDisposed } from "@/server/global-lifecycle"
```
Also yield a stable bridge at handler construction:
```ts
const bridge = yield* EffectBridge.make()
```
Do not use `Effect.forkScoped` for this fire-and-forget disposal; the request scope can close before disposal finishes.
`src/server/routes/instance/httpapi/handlers/global.ts` already yields `InstanceStore.Service` for `/dispose`. Keep `/dispose` strict, or use the shared helper with `swallowErrors: false` so explicit disposal failures still surface.
### TUI worker reload
File: `src/cli/cmd/tui/worker.ts`
Current:
```ts
await AppRuntime.runPromise(Config.Service.use((cfg) => cfg.invalidate(true)))
```
Change to:
```ts
await AppRuntime.runPromise(
Effect.gen(function* () {
const cfg = yield* Config.Service
const store = yield* InstanceStore.Service
yield* cfg.invalidate()
yield* store.disposeAll()
}),
)
```
Imports needed:
```ts
import { Effect } from "effect"
import { InstanceStore } from "@/project/instance-store"
```
No global disposed event is required here unless existing TUI behavior depends on it. The current worker path only calls `Config.invalidate(true)` and does not directly interact with server event streams.
## Helper Extraction
If both global routes need identical "dispose all and emit global disposed" behavior, extract a helper outside `Config`.
Preferred location:
`src/server/global-lifecycle.ts`
Suggested helper:
```ts
export const emitGlobalDisposed = Effect.sync(() =>
GlobalBus.emit("event", {
directory: "global",
payload: {
type: Event.Disposed.type,
properties: {},
},
}),
)
export const disposeAllInstancesAndEmitGlobalDisposed = Effect.fn("Server.disposeAllInstancesAndEmitGlobalDisposed")(function* (options?: {
swallowErrors?: boolean
}) {
const store = yield* InstanceStore.Service
const dispose = store.disposeAll()
yield* (options?.swallowErrors ? dispose.pipe(Effect.catch(() => Effect.void)) : dispose)
yield* emitGlobalDisposed
})
```
Use this helper only from server/global route code and explicit reload/dispose orchestration. Do not import it into `Config`.
Use `swallowErrors: true` only for paths that previously swallowed disposal errors, such as config invalidation. Keep explicit `/dispose` strict by omitting `swallowErrors`.
## Tests To Update
### Config tests
File: `test/config/config.test.ts`
1. `save(...)`, `saveGlobal(...)`, and `clear(...)` helpers should still run against `Config.layer` only.
2. They should not need `InstanceRuntime`, `InstanceStore`, or no-op lifecycle mocks.
3. Existing config tests should continue to pass because config no longer disposes instances internally.
### TUI config tests
File: `test/config/tui.test.ts`
1. The `clear` helper currently calls `Config.invalidate` through `AppRuntime`.
2. After `invalidate()` is config-only, this is fine and should not dispose instances.
### Route behavior tests
Add or update focused tests for lifecycle ownership:
1. Legacy instance config route disposes only the active instance after project config update.
2. HttpApi instance config route still marks the active instance for disposal after project config update.
3. Legacy global config route disposes all instances after global config update.
4. HttpApi global config route disposes all instances after global config update.
5. Global config routes do not dispose instances when the config write is a no-op.
Prefer existing route tests if they already cover config update behavior. Do not add broad integration tests unless necessary.
Suggested new focused files if no existing test has the right harness:
1. `test/server/global-config.test.ts` for legacy Hono global config update lifecycle.
2. `test/server/httpapi-global-config.test.ts` for Effect HttpApi global config update lifecycle.
## Verification Commands
Run from `packages/opencode`:
```bash
bun typecheck
bun run test test/config/config.test.ts test/config/tui.test.ts
bun run test test/server/httpapi-config.test.ts test/server/httpapi-instance-context.test.ts test/server/httpapi-bridge.test.ts
bun run test test/server/global-config.test.ts test/server/httpapi-global-config.test.ts
bun run test test/project/instance-bootstrap-regression.test.ts test/agent/plugin-agent-regression.test.ts test/project/instance.test.ts
env -u OPENCODE_EXPERIMENTAL_WORKSPACES bun run test
```
## Non-Goals
1. Do not remove `InstanceRuntime` entirely in this change. It is still needed for legacy Promise/ALS callers.
2. Do not change `InstanceStore` bootstrap ownership.
3. Do not change config parsing/merging semantics.
4. Do not make `Config.layer` depend on `InstanceStore.Service`.
## Expected End State
1. `src/config/config.ts` has no import or dynamic import of `InstanceRuntime`, `InstanceStore`, or server lifecycle helpers.
2. `Config.update` and `Config.updateGlobal` only write config and invalidate config-owned caches.
3. Instance disposal is visible at route/worker orchestration boundaries.
4. Tests that exercise config parsing/writing no longer need special lifecycle stubs.

View File

@@ -51,6 +51,7 @@ import { LoadAPIKeyError } from "ai"
import type { AssistantMessage, Event, OpencodeClient, SessionMessageResponse, ToolPart } from "@opencode-ai/sdk/v2"
import { applyPatch } from "diff"
import { InstallationVersion } from "@opencode-ai/core/installation/version"
import { ShellToolID } from "@/tool/shell/id"
type ModeOption = { id: string; name: string; description?: string }
type ModelOption = { modelId: string; name: string }
@@ -144,7 +145,7 @@ export class Agent implements ACPAgent {
private sessionManager: ACPSessionManager
private eventAbort = new AbortController()
private eventStarted = false
private bashSnapshots = new Map<string, string>()
private shellSnapshots = new Map<string, string>()
private toolStarts = new Set<string>()
private permissionQueues = new Map<string, Promise<void>>()
private permissionOptions: PermissionOption[] = [
@@ -283,16 +284,16 @@ export class Agent implements ACPAgent {
switch (part.state.status) {
case "pending":
this.bashSnapshots.delete(part.callID)
this.shellSnapshots.delete(part.callID)
return
case "running":
const output = this.bashOutput(part)
const output = this.shellOutput(part)
const content: ToolCallContent[] = []
if (output) {
const hash = Hash.fast(output)
if (part.tool === "bash") {
if (this.bashSnapshots.get(part.callID) === hash) {
if (part.tool === ShellToolID.id) {
if (this.shellSnapshots.get(part.callID) === hash) {
await this.connection
.sessionUpdate({
sessionId,
@@ -311,7 +312,7 @@ export class Agent implements ACPAgent {
})
return
}
this.bashSnapshots.set(part.callID, hash)
this.shellSnapshots.set(part.callID, hash)
}
content.push({
type: "content",
@@ -342,7 +343,7 @@ export class Agent implements ACPAgent {
case "completed": {
this.toolStarts.delete(part.callID)
this.bashSnapshots.delete(part.callID)
this.shellSnapshots.delete(part.callID)
const kind = toToolKind(part.tool)
const content: ToolCallContent[] = [
{
@@ -423,7 +424,7 @@ export class Agent implements ACPAgent {
}
case "error":
this.toolStarts.delete(part.callID)
this.bashSnapshots.delete(part.callID)
this.shellSnapshots.delete(part.callID)
await this.connection
.sessionUpdate({
sessionId,
@@ -837,10 +838,10 @@ export class Agent implements ACPAgent {
await this.toolStart(sessionId, part)
switch (part.state.status) {
case "pending":
this.bashSnapshots.delete(part.callID)
this.shellSnapshots.delete(part.callID)
break
case "running":
const output = this.bashOutput(part)
const output = this.shellOutput(part)
const runningContent: ToolCallContent[] = []
if (output) {
runningContent.push({
@@ -871,7 +872,7 @@ export class Agent implements ACPAgent {
break
case "completed":
this.toolStarts.delete(part.callID)
this.bashSnapshots.delete(part.callID)
this.shellSnapshots.delete(part.callID)
const kind = toToolKind(part.tool)
const content: ToolCallContent[] = [
{
@@ -951,7 +952,7 @@ export class Agent implements ACPAgent {
break
case "error":
this.toolStarts.delete(part.callID)
this.bashSnapshots.delete(part.callID)
this.shellSnapshots.delete(part.callID)
await this.connection
.sessionUpdate({
sessionId,
@@ -1105,8 +1106,8 @@ export class Agent implements ACPAgent {
}
}
private bashOutput(part: ToolPart) {
if (part.tool !== "bash") return
private shellOutput(part: ToolPart) {
if (part.tool !== ShellToolID.id) return
if (!("metadata" in part.state) || !part.state.metadata || typeof part.state.metadata !== "object") return
const output = part.state.metadata["output"]
if (typeof output !== "string") return
@@ -1549,9 +1550,11 @@ export class Agent implements ACPAgent {
function toToolKind(toolName: string): ToolKind {
const tool = toolName.toLocaleLowerCase()
switch (tool) {
case "bash":
case ShellToolID.id:
return "execute"
case "webfetch":
return "fetch"
@@ -1576,6 +1579,7 @@ function toToolKind(toolName: string): ToolKind {
function toLocations(toolName: string, input: Record<string, any>): { path: string }[] {
const tool = toolName.toLocaleLowerCase()
switch (tool) {
case "read":
case "edit":
@@ -1584,7 +1588,7 @@ function toLocations(toolName: string, input: Record<string, any>): { path: stri
case "glob":
case "grep":
return input["path"] ? [{ path: input["path"] }] : []
case "bash":
case ShellToolID.id:
return []
default:
return []

View File

@@ -24,6 +24,7 @@ export function payloads() {
.map(([type, def]) => {
return z
.object({
id: z.string(),
type: z.literal(type),
properties: zodObject(def.properties),
})
@@ -39,6 +40,7 @@ export function effectPayloads() {
.entries()
.map(([type, def]) =>
Schema.Struct({
id: Schema.String,
type: Schema.Literal(type),
properties: def.properties,
}).annotate({ identifier: `Event.${type}` }),

View File

@@ -1,4 +1,5 @@
import { EventEmitter } from "events"
import { Identifier } from "@/id/id"
export type GlobalEvent = {
directory?: string
@@ -7,6 +8,15 @@ export type GlobalEvent = {
payload: any
}
export const GlobalBus = new EventEmitter<{
class GlobalBusEmitter extends EventEmitter<{
event: [GlobalEvent]
}>()
}> {
override emit(eventName: "event", event: GlobalEvent): boolean {
if (event.payload && typeof event.payload === "object" && !("id" in event.payload)) {
event.payload.id = event.payload.syncEvent?.id ?? Identifier.create("evt", "ascending")
}
return super.emit(eventName, event)
}
}
export const GlobalBus = new GlobalBusEmitter()

View File

@@ -5,6 +5,7 @@ import { BusEvent } from "./bus-event"
import { GlobalBus } from "./global"
import { InstanceState } from "@/effect/instance-state"
import { makeRuntime } from "@/effect/run-service"
import { Identifier } from "@/id/id"
const log = Log.create({ service: "bus" })
@@ -18,6 +19,7 @@ export const InstanceDisposed = BusEvent.define(
)
type Payload<D extends BusEvent.Definition = BusEvent.Definition> = {
id: string
type: D["type"]
properties: BusProperties<D>
}
@@ -28,7 +30,11 @@ type State = {
}
export interface Interface {
readonly publish: <D extends BusEvent.Definition>(def: D, properties: BusProperties<D>) => Effect.Effect<void>
readonly publish: <D extends BusEvent.Definition>(
def: D,
properties: BusProperties<D>,
options?: { id?: string },
) => Effect.Effect<void>
readonly subscribe: <D extends BusEvent.Definition>(def: D) => Stream.Stream<Payload<D>>
readonly subscribeAll: () => Stream.Stream<Payload>
readonly subscribeCallback: <D extends BusEvent.Definition>(
@@ -53,6 +59,7 @@ export const layer = Layer.effect(
// Publish InstanceDisposed before shutting down so subscribers see it
yield* PubSub.publish(wildcard, {
type: InstanceDisposed.type,
id: createID(),
properties: { directory: ctx.directory },
})
yield* PubSub.shutdown(wildcard)
@@ -77,10 +84,10 @@ export const layer = Layer.effect(
})
}
function publish<D extends BusEvent.Definition>(def: D, properties: BusProperties<D>) {
function publish<D extends BusEvent.Definition>(def: D, properties: BusProperties<D>, options?: { id?: string }) {
return Effect.gen(function* () {
const s = yield* InstanceState.get(state)
const payload: Payload = { type: def.type, properties }
const payload: Payload = { id: options?.id ?? createID(), type: def.type, properties }
log.info("publishing", { type: def.type })
const ps = s.typed.get(def.type)
@@ -173,8 +180,16 @@ const { runPromise, runSync } = makeRuntime(Service, layer)
// runSync is safe here because the subscribe chain (InstanceState.get, PubSub.subscribe,
// Scope.make, Effect.forkScoped) is entirely synchronous. If any step becomes async, this will throw.
export async function publish<D extends BusEvent.Definition>(def: D, properties: BusProperties<D>) {
return runPromise((svc) => svc.publish(def, properties))
export function createID() {
return Identifier.create("evt", "ascending")
}
export async function publish<D extends BusEvent.Definition>(
def: D,
properties: BusProperties<D>,
options?: { id?: string },
) {
return runPromise((svc) => svc.publish(def, properties, options))
}
export function subscribe<D extends BusEvent.Definition>(def: D, callback: (event: Payload<D>) => unknown) {

View File

@@ -1,15 +1,17 @@
import { Instance } from "../project/instance"
import { InstanceRuntime } from "../project/instance-runtime"
import { InstanceStore } from "../project/instance-store"
import { getBootstrapRunEffect } from "../effect/app-runtime"
export async function bootstrap<T>(directory: string, cb: () => Promise<T>) {
return Instance.provide({
directory,
init: await getBootstrapRunEffect(),
fn: async () => {
try {
const result = await cb()
return result
} finally {
await InstanceRuntime.disposeInstance(Instance.current)
await InstanceStore.disposeInstance(Instance.current)
}
},
})

View File

@@ -1,13 +1,13 @@
import type { Argv } from "yargs"
import { Session } from "@/session/session"
import { MessageV2 } from "../../session/message-v2"
import { SessionID } from "../../session/schema"
import { cmd } from "./cmd"
import { bootstrap } from "../bootstrap"
import { effectCmd, fail } from "../effect-cmd"
import { UI } from "../ui"
import * as prompts from "@clack/prompts"
import { EOL } from "os"
import { AppRuntime } from "@/effect/app-runtime"
import { Effect } from "effect"
import { InstanceRef } from "@/effect/instance-ref"
import { InstanceStore } from "@/project/instance-store"
function redact(kind: string, id: string, value: string) {
return value.trim() ? `[redacted:${kind}:${id}]` : value
@@ -220,11 +220,11 @@ function sanitize(data: { info: Session.Info; messages: MessageV2.WithParts[] })
}
}
export const ExportCommand = cmd({
export const ExportCommand = effectCmd({
command: "export [sessionID]",
describe: "export session data as JSON",
builder: (yargs: Argv) => {
return yargs
builder: (yargs) =>
yargs
.positional("sessionID", {
describe: "session id to export",
type: "string",
@@ -232,72 +232,65 @@ export const ExportCommand = cmd({
.option("sanitize", {
describe: "redact sensitive transcript and file data",
type: "boolean",
})
},
handler: async (args) => {
await bootstrap(process.cwd(), async () => {
let sessionID = args.sessionID ? SessionID.make(args.sessionID) : undefined
process.stderr.write(`Exporting session: ${sessionID ?? "latest"}\n`)
if (!sessionID) {
UI.empty()
prompts.intro("Export session", {
output: process.stderr,
})
const sessions = await AppRuntime.runPromise(Session.Service.use((svc) => svc.list()))
if (sessions.length === 0) {
prompts.log.error("No sessions found", {
output: process.stderr,
})
prompts.outro("Done", {
output: process.stderr,
})
return
}
sessions.sort((a, b) => b.time.updated - a.time.updated)
const selectedSession = await prompts.autocomplete({
message: "Select session to export",
maxItems: 10,
options: sessions.map((session) => ({
label: session.title,
value: session.id,
hint: `${new Date(session.time.updated).toLocaleString()}${session.id.slice(-8)}`,
})),
output: process.stderr,
})
if (prompts.isCancel(selectedSession)) {
throw new UI.CancelledError()
}
sessionID = selectedSession
prompts.outro("Exporting session...", {
output: process.stderr,
})
}
try {
const sessionInfo = await AppRuntime.runPromise(Session.Service.use((svc) => svc.get(sessionID!)))
const messages = await AppRuntime.runPromise(
Session.Service.use((svc) => svc.messages({ sessionID: sessionInfo.id })),
)
const exportData = {
info: sessionInfo,
messages,
}
process.stdout.write(JSON.stringify(args.sanitize ? sanitize(exportData) : exportData, null, 2))
process.stdout.write(EOL)
} catch {
UI.error(`Session not found: ${sessionID!}`)
process.exit(1)
}
})
},
}),
handler: Effect.fn("Cli.export")(function* (args) {
const ctx = yield* InstanceRef
if (!ctx) return
const store = yield* InstanceStore.Service
return yield* run(args).pipe(Effect.ensuring(store.dispose(ctx)))
}),
})
const run = Effect.fn("Cli.export.body")(function* (args: { sessionID?: string; sanitize?: boolean }) {
const svc = yield* Session.Service
let sessionID = args.sessionID ? SessionID.make(args.sessionID) : undefined
process.stderr.write(`Exporting session: ${sessionID ?? "latest"}\n`)
if (!sessionID) {
UI.empty()
prompts.intro("Export session", { output: process.stderr })
const sessions = yield* svc.list()
if (sessions.length === 0) {
prompts.log.error("No sessions found", { output: process.stderr })
prompts.outro("Done", { output: process.stderr })
return
}
sessions.sort((a, b) => b.time.updated - a.time.updated)
const selectedSession = yield* Effect.promise(() =>
prompts.autocomplete({
message: "Select session to export",
maxItems: 10,
options: sessions.map((session) => ({
label: session.title,
value: session.id,
hint: `${new Date(session.time.updated).toLocaleString()}${session.id.slice(-8)}`,
})),
output: process.stderr,
}),
)
if (prompts.isCancel(selectedSession)) {
return yield* Effect.die(new UI.CancelledError())
}
sessionID = selectedSession
prompts.outro("Exporting session...", { output: process.stderr })
}
// Match legacy try/catch — catches both typed failures and defects
// (Session.Service.get throws NotFoundError as a defect, not a typed E).
return yield* Effect.gen(function* () {
const sessionInfo = yield* svc.get(sessionID!)
const messages = yield* svc.messages({ sessionID: sessionInfo.id })
const exportData = { info: sessionInfo, messages }
process.stdout.write(JSON.stringify(args.sanitize ? sanitize(exportData) : exportData, null, 2))
process.stdout.write(EOL)
}).pipe(Effect.catchCause(() => fail(`Session not found: ${sessionID!}`)))
})

View File

@@ -879,7 +879,7 @@ export const GithubRunCommand = cmd({
function subscribeSessionEvents() {
const TOOL: Record<string, [string, string]> = {
todowrite: ["Todo", UI.Style.TEXT_WARNING_BOLD],
bash: ["Bash", UI.Style.TEXT_DANGER_BOLD],
bash: ["Shell", UI.Style.TEXT_DANGER_BOLD],
edit: ["Edit", UI.Style.TEXT_SUCCESS_BOLD],
glob: ["Glob", UI.Style.TEXT_INFO_BOLD],
grep: ["Grep", UI.Style.TEXT_INFO_BOLD],

View File

@@ -1,17 +1,15 @@
import type { Argv } from "yargs"
import type { Session as SDKSession, Message, Part } from "@opencode-ai/sdk/v2"
import { Session } from "@/session/session"
import { MessageV2 } from "../../session/message-v2"
import { cmd } from "./cmd"
import { bootstrap } from "../bootstrap"
import { CliError, effectCmd } from "../effect-cmd"
import { Database } from "@/storage/db"
import { SessionTable, MessageTable, PartTable } from "../../session/session.sql"
import { Instance } from "../../project/instance"
import { InstanceRef } from "@/effect/instance-ref"
import { InstanceStore } from "@/project/instance-store"
import { ShareNext } from "@/share/share-next"
import { EOL } from "os"
import { Filesystem } from "@/util/filesystem"
import { AppRuntime } from "@/effect/app-runtime"
import { Schema } from "effect"
import { Effect, Schema } from "effect"
const decodeMessageInfo = Schema.decodeUnknownSync(MessageV2.Info)
const decodePart = Schema.decodeUnknownSync(MessageV2.Part)
@@ -78,135 +76,147 @@ export function transformShareData(shareData: ShareData[]): {
}
}
export const ImportCommand = cmd({
type ExportData = { info: SDKSession; messages: Array<{ info: Message; parts: Part[] }> }
export const ImportCommand = effectCmd({
command: "import <file>",
describe: "import session data from JSON file or URL",
builder: (yargs: Argv) => {
return yargs.positional("file", {
builder: (yargs) =>
yargs.positional("file", {
describe: "path to JSON file or share URL",
type: "string",
demandOption: true,
}),
handler: Effect.fn("Cli.import")(function* (args) {
// effectCmd always provides InstanceRef via InstanceStore.Service.provide; this is an invariant.
const ctx = yield* InstanceRef
if (!ctx) return yield* Effect.die("InstanceRef not provided")
const store = yield* InstanceStore.Service
// Ensure store.dispose runs disposers and emits server.instance.disposed
// on every exit path: success, early return, typed failure, defect, interrupt.
return yield* runImport(args.file, ctx.project.id).pipe(Effect.ensuring(store.dispose(ctx)))
}),
})
const runImport = Effect.fn("Cli.import.body")(function* (file: string, projectID: string) {
const share = yield* ShareNext.Service
let exportData: ExportData | undefined
const isUrl = file.startsWith("http://") || file.startsWith("https://")
if (isUrl) {
const slug = parseShareUrl(file)
if (!slug) {
const baseUrl = yield* Effect.orDie(share.url())
process.stdout.write(`Invalid URL format. Expected: ${baseUrl}/share/<slug>`)
process.stdout.write(EOL)
return
}
const baseUrl = new URL(file).origin
const req = yield* Effect.orDie(share.request())
const headers = shouldAttachShareAuthHeaders(file, req.baseUrl) ? req.headers : {}
const tryFetch = (url: string) =>
Effect.tryPromise({
try: () => fetch(url, { headers }),
catch: (e) =>
new CliError({
message: `Failed to fetch share data: ${e instanceof Error ? e.message : String(e)}`,
}),
})
const dataPath = req.api.data(slug)
let response = yield* tryFetch(`${baseUrl}${dataPath}`)
if (!response.ok && dataPath !== `/api/share/${slug}/data`) {
response = yield* tryFetch(`${baseUrl}/api/share/${slug}/data`)
}
if (!response.ok) {
process.stdout.write(`Failed to fetch share data: ${response.statusText}`)
process.stdout.write(EOL)
return
}
const shareData = yield* Effect.tryPromise({
try: () => response.json() as Promise<ShareData[]>,
catch: () => new CliError({ message: "Share data was not valid JSON" }),
})
},
handler: async (args) => {
await bootstrap(process.cwd(), async () => {
let exportData:
| {
info: SDKSession
messages: Array<{
info: Message
parts: Part[]
}>
}
| undefined
const transformed = transformShareData(shareData)
const isUrl = args.file.startsWith("http://") || args.file.startsWith("https://")
if (!transformed) {
process.stdout.write(`Share not found or empty: ${slug}`)
process.stdout.write(EOL)
return
}
if (isUrl) {
const slug = parseShareUrl(args.file)
if (!slug) {
const baseUrl = await AppRuntime.runPromise(ShareNext.Service.use((svc) => svc.url()))
process.stdout.write(`Invalid URL format. Expected: ${baseUrl}/share/<slug>`)
process.stdout.write(EOL)
return
}
exportData = transformed
} else {
exportData = yield* Effect.promise(() =>
Filesystem.readJson<NonNullable<typeof exportData>>(file).catch(() => undefined),
)
if (!exportData) {
process.stdout.write(`File not found: ${file}`)
process.stdout.write(EOL)
return
}
}
const parsed = new URL(args.file)
const baseUrl = parsed.origin
const req = await AppRuntime.runPromise(ShareNext.Service.use((svc) => svc.request()))
const headers = shouldAttachShareAuthHeaders(args.file, req.baseUrl) ? req.headers : {}
if (!exportData) {
process.stdout.write(`Failed to read session data`)
process.stdout.write(EOL)
return
}
const dataPath = req.api.data(slug)
let response = await fetch(`${baseUrl}${dataPath}`, {
headers,
const info = Schema.decodeUnknownSync(Session.Info)({
...exportData.info,
projectID,
}) as Session.Info
const row = Session.toRow(info)
Database.use((db) =>
db
.insert(SessionTable)
.values(row)
.onConflictDoUpdate({ target: SessionTable.id, set: { project_id: row.project_id } })
.run(),
)
for (const msg of exportData.messages) {
const msgInfo = decodeMessageInfo(msg.info) as MessageV2.Info
const { id, sessionID: _, ...msgData } = msgInfo
Database.use((db) =>
db
.insert(MessageTable)
.values({
id,
session_id: row.id,
time_created: msgInfo.time?.created ?? Date.now(),
data: msgData,
})
.onConflictDoNothing()
.run(),
)
if (!response.ok && dataPath !== `/api/share/${slug}/data`) {
response = await fetch(`${baseUrl}/api/share/${slug}/data`, {
headers,
})
}
if (!response.ok) {
process.stdout.write(`Failed to fetch share data: ${response.statusText}`)
process.stdout.write(EOL)
return
}
const shareData: ShareData[] = await response.json()
const transformed = transformShareData(shareData)
if (!transformed) {
process.stdout.write(`Share not found or empty: ${slug}`)
process.stdout.write(EOL)
return
}
exportData = transformed
} else {
exportData = await Filesystem.readJson<NonNullable<typeof exportData>>(args.file).catch(() => undefined)
if (!exportData) {
process.stdout.write(`File not found: ${args.file}`)
process.stdout.write(EOL)
return
}
}
if (!exportData) {
process.stdout.write(`Failed to read session data`)
process.stdout.write(EOL)
return
}
const info = Schema.decodeUnknownSync(Session.Info)({
...exportData.info,
projectID: Instance.project.id,
}) as Session.Info
const row = Session.toRow(info)
for (const part of msg.parts) {
const partInfo = decodePart(part) as MessageV2.Part
const { id: partId, sessionID: _s, messageID, ...partData } = partInfo
Database.use((db) =>
db
.insert(SessionTable)
.values(row)
.onConflictDoUpdate({ target: SessionTable.id, set: { project_id: row.project_id } })
.insert(PartTable)
.values({
id: partId,
message_id: messageID,
session_id: row.id,
data: partData,
})
.onConflictDoNothing()
.run(),
)
}
}
for (const msg of exportData.messages) {
const msgInfo = decodeMessageInfo(msg.info) as MessageV2.Info
const { id, sessionID: _, ...msgData } = msgInfo
Database.use((db) =>
db
.insert(MessageTable)
.values({
id,
session_id: row.id,
time_created: msgInfo.time?.created ?? Date.now(),
data: msgData,
})
.onConflictDoNothing()
.run(),
)
for (const part of msg.parts) {
const partInfo = decodePart(part) as MessageV2.Part
const { id: partId, sessionID: _s, messageID, ...partData } = partInfo
Database.use((db) =>
db
.insert(PartTable)
.values({
id: partId,
message_id: messageID,
session_id: row.id,
data: partData,
})
.onConflictDoNothing()
.run(),
)
}
}
process.stdout.write(`Imported session: ${exportData.info.id}`)
process.stdout.write(EOL)
})
},
process.stdout.write(`Imported session: ${exportData.info.id}`)
process.stdout.write(EOL)
})

View File

@@ -1,16 +1,16 @@
import { intro, log, outro, spinner } from "@clack/prompts"
import type { Argv } from "yargs"
import { Effect } from "effect"
import { ConfigPaths } from "@/config/paths"
import { Global } from "@opencode-ai/core/global"
import { installPlugin, patchPluginConfig, readPluginManifest } from "../../plugin/install"
import { resolvePluginTarget } from "../../plugin/shared"
import { Instance } from "../../project/instance"
import { errorMessage } from "../../util/error"
import { Filesystem } from "@/util/filesystem"
import { Process } from "@/util/process"
import { UI } from "../ui"
import { cmd } from "./cmd"
import { effectCmd } from "../effect-cmd"
import { InstanceRef } from "@/effect/instance-ref"
type Spin = {
start: (msg: string) => void
@@ -175,12 +175,12 @@ export function createPlugTask(input: PlugInput, dep: PlugDeps = defaultPlugDeps
}
}
export const PluginCommand = cmd({
export const PluginCommand = effectCmd({
command: "plugin <module>",
aliases: ["plug"],
describe: "install plugin and update config",
builder: (yargs: Argv) => {
return yargs
builder: (yargs) =>
yargs
.positional("module", {
type: "string",
describe: "npm module name",
@@ -196,9 +196,8 @@ export const PluginCommand = cmd({
type: "boolean",
default: false,
describe: "replace existing plugin version",
})
},
handler: async (args) => {
}),
handler: Effect.fn("Cli.plug")(function* (args) {
const mod = String(args.module ?? "").trim()
if (!mod) {
UI.error("module is required")
@@ -214,20 +213,18 @@ export const PluginCommand = cmd({
global: Boolean(args.global),
force: Boolean(args.force),
})
let ok = true
await Instance.provide({
directory: process.cwd(),
fn: async () => {
ok = await run({
vcs: Instance.project.vcs,
worktree: Instance.worktree,
directory: Instance.directory,
})
},
})
const ctx = yield* InstanceRef
if (!ctx) return
const ok = yield* Effect.promise(() =>
run({
vcs: ctx.project.vcs,
worktree: ctx.worktree,
directory: ctx.directory,
}),
)
outro("Done")
if (!ok) process.exitCode = 1
},
}),
})

View File

@@ -1,11 +1,11 @@
import { Effect } from "effect"
import { UI } from "../ui"
import { cmd } from "./cmd"
import { AppRuntime } from "@/effect/app-runtime"
import { effectCmd, fail } from "../effect-cmd"
import { Git } from "@/git"
import { Instance } from "@/project/instance"
import { InstanceRef } from "@/effect/instance-ref"
import { Process } from "@/util/process"
export const PrCommand = cmd({
export const PrCommand = effectCmd({
command: "pr <number>",
describe: "fetch and checkout a GitHub PR branch, then run opencode",
builder: (yargs) =>
@@ -14,125 +14,102 @@ export const PrCommand = cmd({
describe: "PR number to checkout",
demandOption: true,
}),
async handler(args) {
await Instance.provide({
directory: process.cwd(),
async fn() {
const project = Instance.project
if (project.vcs !== "git") {
UI.error("Could not find git repository. Please run this command from a git repository.")
process.exit(1)
handler: Effect.fn("Cli.pr")(function* (args) {
const ctx = yield* InstanceRef
if (!ctx) return yield* fail("Could not load instance context")
if (ctx.project.vcs !== "git") {
return yield* fail("Could not find git repository. Please run this command from a git repository.")
}
const git = yield* Git.Service
const worktree = ctx.worktree
const prNumber = args.number
const localBranchName = `pr/${prNumber}`
UI.println(`Fetching and checking out PR #${prNumber}...`)
const checkout = yield* Effect.promise(() =>
Process.run(["gh", "pr", "checkout", `${prNumber}`, "--branch", localBranchName, "--force"], { nothrow: true }),
)
if (checkout.code !== 0) {
return yield* fail(`Failed to checkout PR #${prNumber}. Make sure you have gh CLI installed and authenticated.`)
}
const prInfoResult = yield* Effect.promise(() =>
Process.text(
[
"gh",
"pr",
"view",
`${prNumber}`,
"--json",
"headRepository,headRepositoryOwner,isCrossRepository,headRefName,body",
],
{ nothrow: true },
),
)
let sessionId: string | undefined
if (prInfoResult.code === 0 && prInfoResult.text.trim()) {
const prInfo = JSON.parse(prInfoResult.text)
if (prInfo?.isCrossRepository && prInfo.headRepository && prInfo.headRepositoryOwner) {
const forkOwner = prInfo.headRepositoryOwner.login
const forkName = prInfo.headRepository.name
const remoteName = forkOwner
const remotes = (yield* git.run(["remote"], { cwd: worktree })).text().trim()
if (!remotes.split("\n").includes(remoteName)) {
yield* git.run(["remote", "add", remoteName, `https://github.com/${forkOwner}/${forkName}.git`], {
cwd: worktree,
})
UI.println(`Added fork remote: ${remoteName}`)
}
const prNumber = args.number
const localBranchName = `pr/${prNumber}`
UI.println(`Fetching and checking out PR #${prNumber}...`)
yield* git.run(["branch", `--set-upstream-to=${remoteName}/${prInfo.headRefName}`, localBranchName], {
cwd: worktree,
})
}
// Use gh pr checkout with custom branch name
const result = await Process.run(
["gh", "pr", "checkout", `${prNumber}`, "--branch", localBranchName, "--force"],
{
nothrow: true,
},
)
if (prInfo?.body) {
const sessionMatch = prInfo.body.match(/https:\/\/opncd\.ai\/s\/([a-zA-Z0-9_-]+)/)
if (sessionMatch) {
const sessionUrl = sessionMatch[0]
UI.println(`Found opencode session: ${sessionUrl}`)
UI.println(`Importing session...`)
if (result.code !== 0) {
UI.error(`Failed to checkout PR #${prNumber}. Make sure you have gh CLI installed and authenticated.`)
process.exit(1)
}
// Fetch PR info for fork handling and session link detection
const prInfoResult = await Process.text(
[
"gh",
"pr",
"view",
`${prNumber}`,
"--json",
"headRepository,headRepositoryOwner,isCrossRepository,headRefName,body",
],
{ nothrow: true },
)
let sessionId: string | undefined
if (prInfoResult.code === 0) {
const prInfoText = prInfoResult.text
if (prInfoText.trim()) {
const prInfo = JSON.parse(prInfoText)
// Handle fork PRs
if (prInfo && prInfo.isCrossRepository && prInfo.headRepository && prInfo.headRepositoryOwner) {
const forkOwner = prInfo.headRepositoryOwner.login
const forkName = prInfo.headRepository.name
const remoteName = forkOwner
// Check if remote already exists
const remotes = await AppRuntime.runPromise(
Git.Service.use((git) => git.run(["remote"], { cwd: Instance.worktree })),
).then((x) => x.text().trim())
if (!remotes.split("\n").includes(remoteName)) {
await AppRuntime.runPromise(
Git.Service.use((git) =>
git.run(["remote", "add", remoteName, `https://github.com/${forkOwner}/${forkName}.git`], {
cwd: Instance.worktree,
}),
),
)
UI.println(`Added fork remote: ${remoteName}`)
}
// Set upstream to the fork so pushes go there
const headRefName = prInfo.headRefName
await AppRuntime.runPromise(
Git.Service.use((git) =>
git.run(["branch", `--set-upstream-to=${remoteName}/${headRefName}`, localBranchName], {
cwd: Instance.worktree,
}),
),
)
}
// Check for opencode session link in PR body
if (prInfo && prInfo.body) {
const sessionMatch = prInfo.body.match(/https:\/\/opncd\.ai\/s\/([a-zA-Z0-9_-]+)/)
if (sessionMatch) {
const sessionUrl = sessionMatch[0]
UI.println(`Found opencode session: ${sessionUrl}`)
UI.println(`Importing session...`)
const importResult = await Process.text(["opencode", "import", sessionUrl], {
nothrow: true,
})
if (importResult.code === 0) {
const importOutput = importResult.text.trim()
// Extract session ID from the output (format: "Imported session: <session-id>")
const sessionIdMatch = importOutput.match(/Imported session: ([a-zA-Z0-9_-]+)/)
if (sessionIdMatch) {
sessionId = sessionIdMatch[1]
UI.println(`Session imported: ${sessionId}`)
}
}
}
const importResult = yield* Effect.promise(() =>
Process.text(["opencode", "import", sessionUrl], { nothrow: true }),
)
if (importResult.code === 0) {
const sessionIdMatch = importResult.text.trim().match(/Imported session: ([a-zA-Z0-9_-]+)/)
if (sessionIdMatch) {
sessionId = sessionIdMatch[1]
UI.println(`Session imported: ${sessionId}`)
}
}
}
}
}
UI.println(`Successfully checked out PR #${prNumber} as branch '${localBranchName}'`)
UI.println()
UI.println("Starting opencode...")
UI.println()
UI.println(`Successfully checked out PR #${prNumber} as branch '${localBranchName}'`)
UI.println()
UI.println("Starting opencode...")
UI.println()
const opencodeArgs = sessionId ? ["-s", sessionId] : []
const opencodeProcess = Process.spawn(["opencode", ...opencodeArgs], {
const opencodeArgs = sessionId ? ["-s", sessionId] : []
const code = yield* Effect.promise(
() =>
Process.spawn(["opencode", ...opencodeArgs], {
stdin: "inherit",
stdout: "inherit",
stderr: "inherit",
cwd: process.cwd(),
})
const code = await opencodeProcess.exited
if (code !== 0) throw new Error(`opencode exited with code ${code}`)
},
})
},
}).exited,
)
// Match legacy throw semantics — propagate as a defect so the top-level
// index.ts catch handles it identically (exit 1, "Unexpected error" banner).
if (code !== 0) return yield* Effect.die(new Error(`opencode exited with code ${code}`))
}),
})

View File

@@ -22,7 +22,8 @@ import { WriteTool } from "../../tool/write"
import { WebSearchTool } from "../../tool/websearch"
import { TaskTool } from "../../tool/task"
import { SkillTool } from "../../tool/skill"
import { BashTool } from "../../tool/bash"
import { ShellTool } from "../../tool/shell"
import { ShellToolID } from "../../tool/shell/id"
import { TodoWriteTool } from "../../tool/todo"
import { Locale } from "@/util/locale"
import { AppRuntime } from "@/effect/app-runtime"
@@ -175,7 +176,7 @@ function skill(info: ToolProps<typeof SkillTool>) {
})
}
function bash(info: ToolProps<typeof BashTool>) {
function shell(info: ToolProps<typeof ShellTool>) {
const output = info.part.state.status === "completed" ? info.part.state.output?.trim() : undefined
block(
{
@@ -359,6 +360,11 @@ export const RunCommand = cmd({
action: "deny",
pattern: "*",
},
{
permission: "edit",
action: "allow",
pattern: "*",
},
]
function title() {
@@ -400,7 +406,7 @@ export const RunCommand = cmd({
async function execute(sdk: OpencodeClient) {
function tool(part: ToolPart) {
try {
if (part.tool === "bash") return bash(props<typeof BashTool>(part))
if (part.tool === ShellToolID.id) return shell(props<typeof ShellTool>(part))
if (part.tool === "glob") return glob(props<typeof GlobTool>(part))
if (part.tool === "grep") return grep(props<typeof GrepTool>(part))
if (part.tool === "read") return read(props<typeof ReadTool>(part))

View File

@@ -1,6 +1,7 @@
import { Server } from "../../server/server"
import { cmd } from "./cmd"
import { withNetworkOptions, resolveNetworkOptions } from "../network"
import { bootstrap } from "../bootstrap"
import { Flag } from "@opencode-ai/core/flag/flag"
export const ServeCommand = cmd({
@@ -11,7 +12,8 @@ export const ServeCommand = cmd({
if (!Flag.OPENCODE_SERVER_PASSWORD) {
console.log("Warning: OPENCODE_SERVER_PASSWORD is not set; server is unsecured.")
}
const opts = await resolveNetworkOptions(args)
const opts = await bootstrap(process.cwd(), () => resolveNetworkOptions(args))
const server = await Server.listen(opts)
console.log(`opencode server listening on http://${server.hostname}:${server.port}`)

View File

@@ -1,8 +1,9 @@
import type { Argv } from "yargs"
import { Effect } from "effect"
import { cmd } from "./cmd"
import { effectCmd, fail } from "../effect-cmd"
import { Session } from "@/session/session"
import { SessionID } from "../../session/schema"
import { bootstrap } from "../bootstrap"
import { UI } from "../ui"
import { Locale } from "@/util/locale"
import { Flag } from "@opencode-ai/core/flag/flag"
@@ -11,7 +12,8 @@ import { Process } from "@/util/process"
import { EOL } from "os"
import path from "path"
import { which } from "../../util/which"
import { AppRuntime } from "@/effect/app-runtime"
import { InstanceRef } from "@/effect/instance-ref"
import { InstanceStore } from "@/project/instance-store"
function pagerCmd(): string[] {
const lessOptions = ["-R", "-S"]
@@ -47,36 +49,35 @@ export const SessionCommand = cmd({
async handler() {},
})
export const SessionDeleteCommand = cmd({
export const SessionDeleteCommand = effectCmd({
command: "delete <sessionID>",
describe: "delete a session",
builder: (yargs: Argv) => {
return yargs.positional("sessionID", {
builder: (yargs) =>
yargs.positional("sessionID", {
describe: "session ID to delete",
type: "string",
demandOption: true,
})
},
handler: async (args) => {
await bootstrap(process.cwd(), async () => {
}),
handler: Effect.fn("Cli.session.delete")(function* (args) {
const ctx = yield* InstanceRef
if (!ctx) return
const store = yield* InstanceStore.Service
return yield* Effect.gen(function* () {
const svc = yield* Session.Service
const sessionID = SessionID.make(args.sessionID)
try {
await AppRuntime.runPromise(Session.Service.use((svc) => svc.get(sessionID)))
} catch {
UI.error(`Session not found: ${args.sessionID}`)
process.exit(1)
}
await AppRuntime.runPromise(Session.Service.use((svc) => svc.remove(sessionID)))
// Match legacy try/catch — Session.get surfaces NotFoundError as a defect.
yield* svc.get(sessionID).pipe(Effect.catchCause(() => fail(`Session not found: ${args.sessionID}`)))
yield* svc.remove(sessionID)
UI.println(UI.Style.TEXT_SUCCESS_BOLD + `Session ${args.sessionID} deleted` + UI.Style.TEXT_NORMAL)
})
},
}).pipe(Effect.ensuring(store.dispose(ctx)))
}),
})
export const SessionListCommand = cmd({
export const SessionListCommand = effectCmd({
command: "list",
describe: "list sessions",
builder: (yargs: Argv) => {
return yargs
builder: (yargs) =>
yargs
.option("max-count", {
alias: "n",
describe: "limit to N most recent sessions",
@@ -87,47 +88,42 @@ export const SessionListCommand = cmd({
type: "string",
choices: ["table", "json"],
default: "table",
})
},
handler: async (args) => {
await bootstrap(process.cwd(), async () => {
const sessions = await AppRuntime.runPromise(
Session.Service.use((svc) => svc.list({ roots: true, limit: args.maxCount })),
)
}),
handler: Effect.fn("Cli.session.list")(function* (args) {
const ctx = yield* InstanceRef
if (!ctx) return
const store = yield* InstanceStore.Service
return yield* Effect.gen(function* () {
const sessions = yield* Session.Service.use((svc) => svc.list({ roots: true, limit: args.maxCount }))
if (sessions.length === 0) {
return
}
if (sessions.length === 0) return
let output: string
if (args.format === "json") {
output = formatSessionJSON(sessions)
} else {
output = formatSessionTable(sessions)
}
const output = args.format === "json" ? formatSessionJSON(sessions) : formatSessionTable(sessions)
const shouldPaginate = process.stdout.isTTY && !args.maxCount && args.format === "table"
if (shouldPaginate) {
const proc = Process.spawn(pagerCmd(), {
stdin: "pipe",
stdout: "inherit",
stderr: "inherit",
yield* Effect.promise(async () => {
const proc = Process.spawn(pagerCmd(), {
stdin: "pipe",
stdout: "inherit",
stderr: "inherit",
})
if (!proc.stdin) {
console.log(output)
return
}
proc.stdin.write(output)
proc.stdin.end()
await proc.exited
})
if (!proc.stdin) {
console.log(output)
return
}
proc.stdin.write(output)
proc.stdin.end()
await proc.exited
} else {
console.log(output)
}
})
},
}).pipe(Effect.ensuring(store.dispose(ctx)))
}),
})
function formatSessionTable(sessions: Session.Info[]): string {

View File

@@ -1,11 +1,11 @@
import type { Argv } from "yargs"
import { cmd } from "./cmd"
import { Effect } from "effect"
import { effectCmd } from "../effect-cmd"
import { Session } from "@/session/session"
import { bootstrap } from "../bootstrap"
import { Database } from "@/storage/db"
import { SessionTable } from "../../session/session.sql"
import { Project } from "@/project/project"
import { Instance } from "../../project/instance"
import { InstanceRef } from "@/effect/instance-ref"
import { InstanceStore } from "@/project/instance-store"
import { AppRuntime } from "@/effect/app-runtime"
interface SessionStats {
@@ -47,11 +47,11 @@ interface SessionStats {
medianTokensPerSession: number
}
export const StatsCommand = cmd({
export const StatsCommand = effectCmd({
command: "stats",
describe: "show token usage and cost statistics",
builder: (yargs: Argv) => {
return yargs
builder: (yargs) =>
yargs
.option("days", {
describe: "show stats for the last N days (default: all time)",
type: "number",
@@ -66,34 +66,42 @@ export const StatsCommand = cmd({
.option("project", {
describe: "filter by project (default: all projects, empty string: current project)",
type: "string",
})
},
handler: async (args) => {
await bootstrap(process.cwd(), async () => {
const stats = await aggregateSessionStats(args.days, args.project)
let modelLimit: number | undefined
if (args.models === true) {
modelLimit = Infinity
} else if (typeof args.models === "number") {
modelLimit = args.models
}
displayStats(stats, args.tools, modelLimit)
})
},
}),
handler: Effect.fn("Cli.stats")(function* (args) {
const ctx = yield* InstanceRef
if (!ctx) return
const store = yield* InstanceStore.Service
return yield* run(args, ctx.project).pipe(Effect.ensuring(store.dispose(ctx)))
}),
})
async function getCurrentProject(): Promise<Project.Info> {
return Instance.project
}
const run = (
args: { days?: number; tools?: number; models?: unknown; project?: string },
currentProject: Project.Info,
) =>
Effect.promise(async () => {
const stats = await aggregateSessionStats(args.days, args.project, currentProject)
let modelLimit: number | undefined
if (args.models === true) {
modelLimit = Infinity
} else if (typeof args.models === "number") {
modelLimit = args.models
}
displayStats(stats, args.tools, modelLimit)
})
async function getAllSessions(): Promise<Session.Info[]> {
const rows = Database.use((db) => db.select().from(SessionTable).all())
return rows.map((row) => Session.fromRow(row))
}
export async function aggregateSessionStats(days?: number, projectFilter?: string): Promise<SessionStats> {
export async function aggregateSessionStats(
days?: number,
projectFilter?: string,
currentProject?: Project.Info,
): Promise<SessionStats> {
const sessions = await getAllSessions()
const MS_IN_DAY = 24 * 60 * 60 * 1000
@@ -117,7 +125,7 @@ export async function aggregateSessionStats(days?: number, projectFilter?: strin
if (projectFilter !== undefined) {
if (projectFilter === "") {
const currentProject = await getCurrentProject()
if (!currentProject) throw new Error("currentProject required when projectFilter is empty string")
filteredSessions = filteredSessions.filter((session) => session.projectID === currentProject.id)
} else {
filteredSessions = filteredSessions.filter((session) => session.projectID === projectFilter)

View File

@@ -28,6 +28,7 @@ import { useEvent } from "@tui/context/event"
import { SDKProvider, useSDK } from "@tui/context/sdk"
import { StartupLoading } from "@tui/component/startup-loading"
import { SyncProvider, useSync } from "@tui/context/sync"
import { SyncProviderV2 } from "@tui/context/sync-v2"
import { LocalProvider, useLocal } from "@tui/context/local"
import { DialogModel } from "@tui/component/dialog-model"
import { useConnected } from "@tui/component/use-connected"
@@ -168,27 +169,29 @@ export function tui(input: {
>
<ProjectProvider>
<SyncProvider>
<ThemeProvider mode={mode}>
<LocalProvider>
<KeybindProvider>
<PromptStashProvider>
<DialogProvider>
<CommandProvider>
<FrecencyProvider>
<PromptHistoryProvider>
<PromptRefProvider>
<EditorContextProvider>
<App onSnapshot={input.onSnapshot} />
</EditorContextProvider>
</PromptRefProvider>
</PromptHistoryProvider>
</FrecencyProvider>
</CommandProvider>
</DialogProvider>
</PromptStashProvider>
</KeybindProvider>
</LocalProvider>
</ThemeProvider>
<SyncProviderV2>
<ThemeProvider mode={mode}>
<LocalProvider>
<KeybindProvider>
<PromptStashProvider>
<DialogProvider>
<CommandProvider>
<FrecencyProvider>
<PromptHistoryProvider>
<PromptRefProvider>
<EditorContextProvider>
<App onSnapshot={input.onSnapshot} />
</EditorContextProvider>
</PromptRefProvider>
</PromptHistoryProvider>
</FrecencyProvider>
</CommandProvider>
</DialogProvider>
</PromptStashProvider>
</KeybindProvider>
</LocalProvider>
</ThemeProvider>
</SyncProviderV2>
</SyncProvider>
</ProjectProvider>
</SDKProvider>
@@ -507,6 +510,7 @@ function App(props: { onSnapshot?: () => Promise<string[]> }) {
{
title: "Toggle MCPs",
value: "mcp.list",
search: "toggle mcps",
category: "Agent",
slash: {
name: "mcps",
@@ -613,6 +617,7 @@ function App(props: { onSnapshot?: () => Promise<string[]> }) {
{
title: mode() === "dark" ? "Switch to light mode" : "Switch to dark mode",
value: "theme.switch_mode",
search: "toggle appearance",
onSelect: (dialog) => {
setMode(mode() === "dark" ? "light" : "dark")
dialog.clear()
@@ -661,6 +666,7 @@ function App(props: { onSnapshot?: () => Promise<string[]> }) {
},
{
title: "Toggle debug panel",
search: "toggle debug",
category: "System",
value: "app.debug",
onSelect: (dialog) => {
@@ -670,6 +676,7 @@ function App(props: { onSnapshot?: () => Promise<string[]> }) {
},
{
title: "Toggle console",
search: "toggle console",
category: "System",
value: "app.console",
onSelect: (dialog) => {
@@ -711,6 +718,7 @@ function App(props: { onSnapshot?: () => Promise<string[]> }) {
{
title: terminalTitleEnabled() ? "Disable terminal title" : "Enable terminal title",
value: "terminal.title.toggle",
search: "toggle terminal title",
keybind: "terminal_title_toggle",
category: "System",
onSelect: (dialog) => {
@@ -726,6 +734,7 @@ function App(props: { onSnapshot?: () => Promise<string[]> }) {
{
title: kv.get("animations_enabled", true) ? "Disable animations" : "Enable animations",
value: "app.toggle.animations",
search: "toggle animations",
category: "System",
onSelect: (dialog) => {
kv.set("animations_enabled", !kv.get("animations_enabled", true))
@@ -769,6 +778,7 @@ function App(props: { onSnapshot?: () => Promise<string[]> }) {
{
title: kv.get("diff_wrap_mode", "word") === "word" ? "Disable diff wrapping" : "Enable diff wrapping",
value: "app.toggle.diffwrap",
search: "toggle diff wrapping",
category: "System",
onSelect: (dialog) => {
const current = kv.get("diff_wrap_mode", "word")
@@ -776,6 +786,15 @@ function App(props: { onSnapshot?: () => Promise<string[]> }) {
dialog.clear()
},
},
{
title: kv.get("clear_prompt_save_history", false) ? "Don't include cleared prompts in history" : "Include cleared prompts in history",
value: "app.toggle.clear_prompt_history",
category: "System",
onSelect: (dialog) => {
kv.set("clear_prompt_save_history", !kv.get("clear_prompt_save_history", false))
dialog.clear()
},
},
])
event.on(TuiEvent.CommandExecute.type, (evt) => {

View File

@@ -82,6 +82,7 @@ export const { use: usePromptHistory, provider: PromptHistoryProvider } = create
return store.history.at(store.index)
},
append(item: PromptInfo) {
if (store.history.at(-1)?.input === item.input) return
const entry = structuredClone(unwrap(item))
let trimmed = false
setStore(

View File

@@ -139,6 +139,7 @@ export function Prompt(props: PromptProps) {
const animationsEnabled = createMemo(() => kv.get("animations_enabled", true))
const list = createMemo(() => props.placeholders?.normal ?? [])
const shell = createMemo(() => props.placeholders?.shell ?? [])
const [autoaccept, setAutoaccept] = kv.signal<"none" | "edit">("permission_auto_accept", "edit")
const fileContextEnabled = createMemo(() => kv.get("file_context_enabled", true))
const [dismissedEditorSelectionKey, setDismissedEditorSelectionKey] = createSignal<string>()
const editorContext = createMemo(() => {
@@ -296,6 +297,17 @@ export function Prompt(props: PromptProps) {
command.register(() => {
return [
{
title: autoaccept() === "none" ? "Enable autoedit" : "Disable autoedit",
value: "permission.auto_accept.toggle",
search: "toggle permissions",
keybind: "permission_auto_accept_toggle",
category: "Agent",
onSelect: (dialog) => {
setAutoaccept(() => (autoaccept() === "none" ? "edit" : "none"))
dialog.clear()
},
},
{
title: "Clear prompt",
value: "prompt.clear",
@@ -750,9 +762,18 @@ export function Prompt(props: PromptProps) {
return false
}
const variant = local.model.variant.current()
let sessionID = props.sessionID
if (sessionID == null) {
const res = await sdk.client.session.create({ workspace: props.workspaceID })
const res = await sdk.client.session.create({
workspace: props.workspaceID,
agent: agent.name,
model: {
providerID: selectedModel.providerID,
id: selectedModel.modelID,
variant,
},
})
if (res.error) {
console.log("Creating a session failed:", res.error)
@@ -792,7 +813,6 @@ export function Prompt(props: PromptProps) {
// Capture mode before it gets reset
const currentMode = store.mode
const variant = local.model.variant.current()
const editorSelection = editorContext()
const currentEditorSelectionKey = editorSelectionKey(editorSelection)
const editorParts =
@@ -1116,6 +1136,12 @@ export function Prompt(props: PromptProps) {
// If no image, let the default paste behavior continue
}
if (keybind.match("input_clear", e) && store.prompt.input !== "") {
if (kv.get("clear_prompt_save_history", false)) {
history.append({
...store.prompt,
mode: store.mode,
})
}
input.clear()
input.extmarks.clear()
setStore("prompt", {
@@ -1308,11 +1334,18 @@ export function Prompt(props: PromptProps) {
)}
</Show>
</box>
<Show when={hasRightContent()}>
<box flexDirection="row" gap={1} alignItems="center">
{props.right}
</box>
</Show>
<box flexDirection="row" gap={1} alignItems="center">
<Show when={autoaccept() === "edit"}>
<text>
<span style={{ fg: theme.warning }}>autoedit</span>
</text>
</Show>
<Show when={hasRightContent()}>
<box flexDirection="row" gap={1} alignItems="center">
{props.right}
</box>
</Show>
</box>
</box>
</box>
</box>

View File

@@ -0,0 +1,298 @@
import { useEvent } from "@tui/context/event"
import type {
SessionMessage,
SessionMessageAssistant,
SessionMessageAssistantReasoning,
SessionMessageAssistantText,
SessionMessageAssistantTool,
} from "@opencode-ai/sdk/v2"
import { createStore, produce, reconcile } from "solid-js/store"
import { createSimpleContext } from "./helper"
import { useSDK } from "./sdk"
function activeAssistant(messages: SessionMessage[]) {
const index = messages.findLastIndex((message) => message.type === "assistant" && !message.time.completed)
if (index < 0) return
const assistant = messages[index]
return assistant?.type === "assistant" ? assistant : undefined
}
function activeCompaction(messages: SessionMessage[]) {
const index = messages.findLastIndex((message) => message.type === "compaction")
if (index < 0) return
const compaction = messages[index]
return compaction?.type === "compaction" ? compaction : undefined
}
function activeShell(messages: SessionMessage[], callID: string) {
const index = messages.findLastIndex((message) => message.type === "shell" && message.callID === callID)
if (index < 0) return
const shell = messages[index]
return shell?.type === "shell" ? shell : undefined
}
function latestTool(assistant: SessionMessageAssistant | undefined, callID?: string) {
return assistant?.content.findLast(
(item): item is SessionMessageAssistantTool => item.type === "tool" && (callID === undefined || item.id === callID),
)
}
function latestText(assistant: SessionMessageAssistant | undefined) {
return assistant?.content.findLast((item): item is SessionMessageAssistantText => item.type === "text")
}
function latestReasoning(assistant: SessionMessageAssistant | undefined, reasoningID: string) {
return assistant?.content.findLast(
(item): item is SessionMessageAssistantReasoning => item.type === "reasoning" && item.id === reasoningID,
)
}
export const { use: useSyncV2, provider: SyncProviderV2 } = createSimpleContext({
name: "SyncV2",
init: () => {
const [store, setStore] = createStore<{
messages: {
[sessionID: string]: SessionMessage[]
}
}>({
messages: {},
})
const event = useEvent()
const sdk = useSDK()
function update(sessionID: string, fn: (messages: SessionMessage[]) => void) {
setStore(
"messages",
produce((draft) => {
fn((draft[sessionID] ??= []))
}),
)
}
event.subscribe((event) => {
switch (event.type) {
case "session.next.prompted": {
update(event.properties.sessionID, (draft) => {
draft.push({
id: event.id,
type: "user",
text: event.properties.prompt.text,
files: event.properties.prompt.files,
agents: event.properties.prompt.agents,
time: { created: event.properties.timestamp },
})
})
break
}
case "session.next.synthetic":
update(event.properties.sessionID, (draft) => {
draft.push({
id: event.id,
type: "synthetic",
sessionID: event.properties.sessionID,
text: event.properties.text,
time: { created: event.properties.timestamp },
})
})
break
case "session.next.shell.started":
update(event.properties.sessionID, (draft) => {
draft.push({
id: event.id,
type: "shell",
callID: event.properties.callID,
command: event.properties.command,
output: "",
time: { created: event.properties.timestamp },
})
})
break
case "session.next.shell.ended":
update(event.properties.sessionID, (draft) => {
const match = activeShell(draft, event.properties.callID)
if (!match) return
match.output = event.properties.output
match.time.completed = event.properties.timestamp
})
break
case "session.next.step.started":
update(event.properties.sessionID, (draft) => {
const currentAssistant = activeAssistant(draft)
if (currentAssistant) currentAssistant.time.completed = event.properties.timestamp
draft.push({
id: event.id,
type: "assistant",
agent: event.properties.agent,
model: event.properties.model,
content: [],
snapshot: event.properties.snapshot ? { start: event.properties.snapshot } : undefined,
time: { created: event.properties.timestamp },
})
})
break
case "session.next.step.ended":
update(event.properties.sessionID, (draft) => {
const currentAssistant = activeAssistant(draft)
if (!currentAssistant) return
currentAssistant.time.completed = event.properties.timestamp
currentAssistant.finish = event.properties.finish
currentAssistant.cost = event.properties.cost
currentAssistant.tokens = event.properties.tokens
if (event.properties.snapshot)
currentAssistant.snapshot = { ...currentAssistant.snapshot, end: event.properties.snapshot }
})
break
case "session.next.text.started":
update(event.properties.sessionID, (draft) => {
activeAssistant(draft)?.content.push({ type: "text", text: "" })
})
break
case "session.next.text.delta":
update(event.properties.sessionID, (draft) => {
const match = latestText(activeAssistant(draft))
if (match) match.text += event.properties.delta
})
break
case "session.next.text.ended":
update(event.properties.sessionID, (draft) => {
const match = latestText(activeAssistant(draft))
if (match) match.text = event.properties.text
})
break
case "session.next.tool.input.started":
update(event.properties.sessionID, (draft) => {
activeAssistant(draft)?.content.push({
type: "tool",
id: event.properties.callID,
name: event.properties.name,
time: { created: event.properties.timestamp },
state: { status: "pending", input: "" },
})
})
break
case "session.next.tool.input.delta":
update(event.properties.sessionID, (draft) => {
const match = latestTool(activeAssistant(draft), event.properties.callID)
if (match?.state.status === "pending") match.state.input += event.properties.delta
})
break
case "session.next.tool.input.ended":
break
case "session.next.tool.called":
update(event.properties.sessionID, (draft) => {
const match = latestTool(activeAssistant(draft), event.properties.callID)
if (!match) return
match.time.ran = event.properties.timestamp
match.provider = event.properties.provider
match.state = { status: "running", input: event.properties.input, structured: {}, content: [] }
})
break
case "session.next.tool.progress":
update(event.properties.sessionID, (draft) => {
const match = latestTool(activeAssistant(draft), event.properties.callID)
if (match?.state.status !== "running") return
match.state.structured = event.properties.structured
match.state.content = [...event.properties.content]
})
break
case "session.next.tool.success":
update(event.properties.sessionID, (draft) => {
const match = latestTool(activeAssistant(draft), event.properties.callID)
if (match?.state.status !== "running") return
match.state = {
status: "completed",
input: match.state.input,
structured: event.properties.structured,
content: [...event.properties.content],
}
match.provider = event.properties.provider
match.time.completed = event.properties.timestamp
})
break
case "session.next.tool.error":
update(event.properties.sessionID, (draft) => {
const match = latestTool(activeAssistant(draft), event.properties.callID)
if (match?.state.status !== "running") return
match.state = {
status: "error",
error: event.properties.error,
input: match.state.input,
structured: match.state.structured,
content: match.state.content,
}
match.provider = event.properties.provider
match.time.completed = event.properties.timestamp
})
break
case "session.next.reasoning.started":
update(event.properties.sessionID, (draft) => {
activeAssistant(draft)?.content.push({
type: "reasoning",
id: event.properties.reasoningID,
text: "",
})
})
break
case "session.next.reasoning.delta":
update(event.properties.sessionID, (draft) => {
const match = latestReasoning(activeAssistant(draft), event.properties.reasoningID)
if (match) match.text += event.properties.delta
})
break
case "session.next.reasoning.ended":
update(event.properties.sessionID, (draft) => {
const match = latestReasoning(activeAssistant(draft), event.properties.reasoningID)
if (match) match.text = event.properties.text
})
break
case "session.next.retried":
break
case "session.next.compaction.started":
update(event.properties.sessionID, (draft) => {
draft.push({
id: event.id,
type: "compaction",
reason: event.properties.reason,
summary: "",
time: { created: event.properties.timestamp },
})
})
break
case "session.next.compaction.delta":
update(event.properties.sessionID, (draft) => {
const match = activeCompaction(draft)
if (match) match.summary += event.properties.text
})
break
case "session.next.compaction.ended":
update(event.properties.sessionID, (draft) => {
const match = activeCompaction(draft)
if (!match) return
match.summary = event.properties.text
match.include = event.properties.include
})
break
}
})
const result = {
data: store,
session: {
message: {
async sync(sessionID: string) {
const response = await sdk.client.v2.session.messages({ sessionID })
setStore("messages", sessionID, reconcile(response.data?.items ?? []))
},
fromSession(sessionID: string) {
const messages = store.messages[sessionID]
if (!messages) return []
return messages
},
},
},
}
return result
},
})

View File

@@ -27,11 +27,11 @@ import { createSimpleContext } from "./helper"
import type { Snapshot } from "@/snapshot"
import { useExit } from "./exit"
import { useArgs } from "./args"
import { useKV } from "./kv"
import { batch, onMount } from "solid-js"
import * as Log from "@opencode-ai/core/util/log"
import { emptyConsoleState, type ConsoleState } from "@/config/console-state"
import path from "path"
import { useKV } from "./kv"
export const { use: useSync, provider: SyncProvider } = createSimpleContext({
name: "Sync",
@@ -110,6 +110,7 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
const project = useProject()
const sdk = useSDK()
const kv = useKV()
const [autoaccept] = kv.signal<"none" | "edit">("permission_auto_accept", "edit")
const fullSyncedSessions = new Set<string>()
let syncedWorkspace = project.workspace.current()
@@ -152,6 +153,13 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
case "permission.asked": {
const request = event.properties
if (autoaccept() === "edit" && request.permission === "edit") {
sdk.client.permission.reply({
reply: "once",
requestID: request.id,
})
break
}
const requests = store.permission[request.sessionID]
if (!requests) {
setStore("permission", request.sessionID, [request])

File diff suppressed because it is too large Load Diff

View File

@@ -7,6 +7,7 @@ import SidebarTodo from "../feature-plugins/sidebar/todo"
import SidebarFiles from "../feature-plugins/sidebar/files"
import SidebarFooter from "../feature-plugins/sidebar/footer"
import PluginManager from "../feature-plugins/system/plugins"
import SessionV2Debug from "../feature-plugins/system/session-v2"
import type { TuiPlugin, TuiPluginModule } from "@opencode-ai/plugin/tui"
export type InternalTuiPlugin = TuiPluginModule & {
@@ -24,4 +25,5 @@ export const INTERNAL_TUI_PLUGINS: InternalTuiPlugin[] = [
SidebarFiles,
SidebarFooter,
PluginManager,
SessionV2Debug,
]

View File

@@ -37,7 +37,8 @@ import { Locale } from "@/util/locale"
import type { Tool } from "@/tool/tool"
import type { ReadTool } from "@/tool/read"
import type { WriteTool } from "@/tool/write"
import { BashTool } from "@/tool/bash"
import { ShellTool } from "@/tool/shell"
import { ShellToolID } from "@/tool/shell/id"
import type { GlobTool } from "@/tool/glob"
import { TodoWriteTool } from "@/tool/todo"
import type { GrepTool } from "@/tool/grep"
@@ -607,6 +608,7 @@ export function Session() {
{
title: sidebarVisible() ? "Hide sidebar" : "Show sidebar",
value: "session.sidebar.toggle",
search: "toggle sidebar",
keybind: "sidebar_toggle",
category: "Session",
onSelect: (dialog) => {
@@ -622,6 +624,7 @@ export function Session() {
title: conceal() ? "Disable code concealment" : "Enable code concealment",
value: "session.toggle.conceal",
keybind: "messages_toggle_conceal",
search: "toggle code concealment",
category: "Session",
onSelect: (dialog) => {
setConceal((prev) => !prev)
@@ -631,6 +634,7 @@ export function Session() {
{
title: showTimestamps() ? "Hide timestamps" : "Show timestamps",
value: "session.toggle.timestamps",
search: "toggle timestamps",
category: "Session",
slash: {
name: "timestamps",
@@ -644,6 +648,7 @@ export function Session() {
{
title: showThinking() ? "Hide thinking" : "Show thinking",
value: "session.toggle.thinking",
search: "toggle thinking",
keybind: "display_thinking",
category: "Session",
slash: {
@@ -658,6 +663,7 @@ export function Session() {
{
title: showDetails() ? "Hide tool details" : "Show tool details",
value: "session.toggle.actions",
search: "toggle tool details",
keybind: "tool_details",
category: "Session",
onSelect: (dialog) => {
@@ -666,8 +672,9 @@ export function Session() {
},
},
{
title: "Toggle session scrollbar",
title: showScrollbar() ? "Hide session scrollbar" : "Show session scrollbar",
value: "session.toggle.scrollbar",
search: "toggle session scrollbar",
keybind: "scrollbar_toggle",
category: "Session",
onSelect: (dialog) => {
@@ -1552,8 +1559,8 @@ function ToolPart(props: { last: boolean; part: ToolPart; message: AssistantMess
return (
<Show when={!shouldHide()}>
<Switch>
<Match when={props.part.tool === "bash"}>
<Bash {...toolprops} />
<Match when={props.part.tool === ShellToolID.id}>
<Shell {...toolprops} />
</Match>
<Match when={props.part.tool === "glob"}>
<Glob {...toolprops} />
@@ -1784,7 +1791,7 @@ function BlockTool(props: {
)
}
function Bash(props: ToolProps<typeof BashTool>) {
function Shell(props: ToolProps<typeof ShellTool>) {
const { theme } = useTheme()
const sync = useSync()
const isRunning = createMemo(() => props.part.state.status === "running")

View File

@@ -15,6 +15,7 @@ import { LANGUAGE_EXTENSIONS } from "@/lsp/language"
import { Keybind } from "@/util/keybind"
import { Locale } from "@/util/locale"
import { Global } from "@opencode-ai/core/global"
import { ShellToolID } from "@/tool/shell/id"
import { useDialog } from "../../ui/dialog"
import { getScrollAcceleration } from "../../util/scroll"
import { useTuiConfig } from "../../context/tui-config"
@@ -287,7 +288,7 @@ export function PermissionPrompt(props: { request: PermissionRequest }) {
}
}
if (permission === "bash") {
if (permission === ShellToolID.id) {
const title =
typeof data.description === "string" && data.description ? data.description : "Shell command"
const command = typeof data.command === "string" ? data.command : ""

View File

@@ -8,6 +8,7 @@ import { UI } from "@/cli/ui"
import * as Log from "@opencode-ai/core/util/log"
import { errorMessage } from "@/util/error"
import { withTimeout } from "@/util/timeout"
import { Instance } from "@/project/instance"
import { withNetworkOptions, resolveNetworkOptionsNoConfig } from "@/cli/network"
import { Filesystem } from "@/util/filesystem"
import type { GlobalEvent } from "@opencode-ai/sdk/v2"
@@ -190,7 +191,11 @@ export const TuiThreadCommand = cmd({
const prompt = await input(args.prompt)
const config = await TuiConfig.get()
const network = resolveNetworkOptionsNoConfig(args)
const network = await Instance.provide({
directory: cwd,
fn: () => resolveNetworkOptionsNoConfig(args),
})
const external =
process.argv.includes("--port") ||
process.argv.includes("--hostname") ||

View File

@@ -37,6 +37,7 @@ export interface DialogSelectOption<T = any> {
title: string
value: T
description?: string
search?: string
footer?: JSX.Element | string
category?: string
categoryView?: JSX.Element
@@ -93,8 +94,8 @@ export function DialogSelect<T>(props: DialogSelectProps<T>) {
// users typically search by the item name, and not its category.
const result = fuzzysort
.go(needle, options, {
keys: ["title", "category"],
scoreFn: (r) => r[0].score * 2 + r[1].score,
keys: ["title", "category", "search"],
scoreFn: (r) => r[0].score * 2 + r[1].score + r[2].score,
})
.map((x) => x.obj)

View File

@@ -2,7 +2,7 @@ import { Installation } from "@/installation"
import { Server } from "@/server/server"
import * as Log from "@opencode-ai/core/util/log"
import { Instance } from "@/project/instance"
import { InstanceRuntime } from "@/project/instance-runtime"
import { InstanceStore } from "@/project/instance-store"
import { Rpc } from "@/util/rpc"
import { upgrade } from "@/cli/upgrade"
import { Config } from "@/config/config"
@@ -10,10 +10,8 @@ import { GlobalBus } from "@/bus/global"
import { Flag } from "@opencode-ai/core/flag/flag"
import { writeHeapSnapshot } from "node:v8"
import { Heap } from "@/cli/heap"
import { AppRuntime } from "@/effect/app-runtime"
import { AppRuntime, getBootstrapRunEffect } from "@/effect/app-runtime"
import { ensureProcessMetadata } from "@opencode-ai/core/util/opencode-process"
import { Effect } from "effect"
import { disposeAllInstancesAndEmitGlobalDisposed } from "@/server/global-lifecycle"
ensureProcessMetadata("worker")
@@ -79,24 +77,19 @@ export const rpc = {
async checkUpgrade(input: { directory: string }) {
await Instance.provide({
directory: input.directory,
init: await getBootstrapRunEffect(),
fn: async () => {
await upgrade().catch(() => {})
},
})
},
async reload() {
await AppRuntime.runPromise(
Effect.gen(function* () {
const cfg = yield* Config.Service
yield* cfg.invalidate()
yield* disposeAllInstancesAndEmitGlobalDisposed({ swallowErrors: true })
}),
)
await AppRuntime.runPromise(Config.Service.use((cfg) => cfg.invalidate(true)))
},
async shutdown() {
Log.Default.info("worker shutting down")
await InstanceRuntime.disposeAllInstances()
await InstanceStore.disposeAllInstances()
if (server) await server.stop(true)
},
}

View File

@@ -5,6 +5,7 @@ import { withNetworkOptions, resolveNetworkOptions } from "../network"
import { Flag } from "@opencode-ai/core/flag/flag"
import open from "open"
import { networkInterfaces } from "os"
import { bootstrap } from "../bootstrap"
function getNetworkIPs() {
const nets = networkInterfaces()
@@ -36,7 +37,7 @@ export const WebCommand = cmd({
if (!Flag.OPENCODE_SERVER_PASSWORD) {
UI.println(UI.Style.TEXT_WARNING_BOLD + "! OPENCODE_SERVER_PASSWORD is not set; server is unsecured.")
}
const opts = await resolveNetworkOptions(args)
const opts = await bootstrap(process.cwd(), () => resolveNetworkOptions(args))
const server = await Server.listen(opts)
UI.empty()
UI.println(UI.logo(" "))

View File

@@ -31,6 +31,7 @@ export const fail = (message: string, exitCode = 1) => Effect.fail(new CliError(
*/
export const effectCmd = <Args, A>(opts: {
command: string | readonly string[]
aliases?: string | readonly string[]
describe: string | false
builder?: (yargs: Argv) => Argv<Args>
/** Defaults to process.cwd(). Override for commands that take a directory positional. */
@@ -39,6 +40,7 @@ export const effectCmd = <Args, A>(opts: {
}) =>
cmd<{}, Args>({
command: opts.command,
aliases: opts.aliases,
describe: opts.describe,
builder: opts.builder as never,
async handler(rawArgs) {

View File

@@ -12,8 +12,11 @@ import { Auth } from "../auth"
import { Env } from "../env"
import { applyEdits, modify } from "jsonc-parser"
import { type InstanceContext } from "../project/instance"
import { InstanceStore } from "../project/instance-store"
import { InstallationLocal, InstallationVersion } from "@opencode-ai/core/installation/version"
import { existsSync } from "fs"
import { GlobalBus } from "@/bus/global"
import { Event } from "../server/event"
import { Account } from "@/account/account"
import { isRecord } from "@/util/record"
import type { ConsoleState } from "./console-state"
@@ -286,9 +289,9 @@ export interface Interface {
readonly get: () => Effect.Effect<Info>
readonly getGlobal: () => Effect.Effect<Info>
readonly getConsoleState: () => Effect.Effect<ConsoleState>
readonly update: (config: Info) => Effect.Effect<void>
readonly updateGlobal: (config: Info) => Effect.Effect<{ info: Info; changed: boolean }>
readonly invalidate: () => Effect.Effect<void>
readonly update: (config: Info, options?: { dispose?: boolean }) => Effect.Effect<void>
readonly updateGlobal: (config: Info) => Effect.Effect<Info>
readonly invalidate: (wait?: boolean) => Effect.Effect<void>
readonly directories: () => Effect.Effect<string[]>
readonly waitForDependencies: () => Effect.Effect<void>
}
@@ -727,17 +730,37 @@ export const layer = Layer.effect(
)
})
const update = Effect.fn("Config.update")(function* (config: Info) {
const update = Effect.fn("Config.update")(function* (config: Info, options?: { dispose?: boolean }) {
const dir = yield* InstanceState.directory
const file = path.join(dir, "config.json")
const existing = yield* loadFile(file)
yield* fs
.writeFileString(file, JSON.stringify(mergeDeep(writable(existing), writable(config)), null, 2))
.pipe(Effect.orDie)
if (options?.dispose !== false) {
// Fail loudly if no instance is bound — silently skipping would
// mask "config update without an active instance" bugs. The throw
// comes from `Instance.current` inside `InstanceState.context`.
const ctx = yield* InstanceState.context
yield* Effect.promise(() => InstanceStore.disposeInstance(ctx))
}
})
const invalidate = Effect.fn("Config.invalidate")(function* () {
const invalidate = Effect.fn("Config.invalidate")(function* (wait?: boolean) {
yield* invalidateGlobal
const task = InstanceStore.disposeAllInstances()
.catch(() => undefined)
.finally(() =>
GlobalBus.emit("event", {
directory: "global",
payload: {
type: Event.Disposed.type,
properties: {},
},
}),
)
if (wait) yield* Effect.promise(() => task)
else void task
})
const updateGlobal = Effect.fn("Config.updateGlobal")(function* (config: Info) {
@@ -761,8 +784,9 @@ export const layer = Layer.effect(
if (changed) yield* fs.writeFileString(file, updated).pipe(Effect.orDie)
}
// Only tear down running instances if the config actually changed.
if (changed) yield* invalidate()
return { info: next, changed }
return next
})
return Service.of({

View File

@@ -1,4 +1,4 @@
import { Layer, ManagedRuntime } from "effect"
import { Effect, Layer, ManagedRuntime } from "effect"
import { attach } from "./run-service"
import * as Observability from "@opencode-ai/core/effect/observability"
@@ -40,7 +40,8 @@ import { Command } from "@/command"
import { Truncate } from "@/tool/truncate"
import { ToolRegistry } from "@/tool/registry"
import { Format } from "@/format"
import { InstanceRuntime } from "@/project/instance-runtime"
import { InstanceBootstrap } from "@/project/bootstrap"
import { InstanceStore } from "@/project/instance-store"
import { Project } from "@/project/project"
import { Vcs } from "@/project/vcs"
import { Workspace } from "@/control-plane/workspace"
@@ -53,13 +54,30 @@ import { SyncEvent } from "@/sync"
import { Npm } from "@opencode-ai/core/npm"
import { memoMap } from "@opencode-ai/core/effect/memo-map"
// Adjusts the default Config layer to ensure that plugins are always initialised before
// any other layers read the current config.
// Plugins may mutate config, so every read accessor is prefixed with `plugin.init()`;
// write paths (`update`, `updateGlobal`, ...) pass through via the spread unchanged.
const ConfigWithPluginPriority = Layer.effect(
  Config.Service,
  Effect.gen(function* () {
    const config = yield* Config.Service
    const plugin = yield* Plugin.Service
    return {
      ...config,
      // Each read chains init first; assumes plugin.init() is idempotent/memoised
      // so repeated reads are cheap — TODO confirm against Plugin.Service impl.
      get: () => Effect.andThen(plugin.init(), config.get),
      getGlobal: () => Effect.andThen(plugin.init(), config.getGlobal),
      getConsoleState: () => Effect.andThen(plugin.init(), config.getConsoleState),
    }
  }),
  // Satisfy the wrapper's own Plugin/Config requirements with the default layers,
  // so downstream consumers only ever see the wrapped Config.Service.
).pipe(Layer.provide(Layer.merge(Plugin.defaultLayer, Config.defaultLayer)))
export const AppLayer = Layer.mergeAll(
Npm.defaultLayer,
AppFileSystem.defaultLayer,
Bus.defaultLayer,
Auth.defaultLayer,
Account.defaultLayer,
Config.defaultLayer,
ConfigWithPluginPriority,
Git.defaultLayer,
Ripgrep.defaultLayer,
File.defaultLayer,
@@ -93,7 +111,8 @@ export const AppLayer = Layer.mergeAll(
Truncate.defaultLayer,
ToolRegistry.defaultLayer,
Format.defaultLayer,
InstanceRuntime.layer,
InstanceBootstrap.defaultLayer,
InstanceStore.defaultLayer,
Project.defaultLayer,
Vcs.defaultLayer,
Workspace.defaultLayer,
@@ -130,3 +149,15 @@ export const AppRuntime: Runtime = {
},
dispose: () => rt.dispose(),
}
// Memoised bootstrap lookup: resolved at most once per process and shared by
// all callers. Declared `| undefined` so the unassigned-before-first-call state
// is represented in the type (the original annotation lied under strictNullChecks).
let bootstrapRun: Promise<Effect.Effect<void>> | undefined

/**
 * Returns the InstanceBootstrap `run` effect, resolving the service through
 * the app runtime at most once.
 *
 * The promise itself is cached (not its result), so concurrent first callers
 * share a single in-flight service resolution.
 */
export function getBootstrapRunEffect(): Promise<Effect.Effect<void>> {
  // `??=` assigns only while the cache is still undefined.
  bootstrapRun ??= AppRuntime.runPromise(
    Effect.gen(function* () {
      return (yield* InstanceBootstrap.Service).run
    }),
  )
  return bootstrapRun
}

View File

@@ -1,9 +0,0 @@
import { Context, Effect } from "effect"
export interface Interface {
readonly run: Effect.Effect<void>
}
export class Service extends Context.Service<Service, Interface>()("@opencode/InstanceBootstrap") {}
export * as InstanceBootstrap from "./bootstrap-service"

View File

@@ -1,4 +1,3 @@
import { Plugin } from "../plugin"
import { Format } from "../format"
import { LSP } from "@/lsp/lsp"
import { File } from "../file"
@@ -10,26 +9,27 @@ import { Command } from "../command"
import { InstanceState } from "@/effect/instance-state"
import { FileWatcher } from "@/file/watcher"
import { ShareNext } from "@/share/share-next"
import { Effect, Layer } from "effect"
import { Context, Effect, Layer } from "effect"
import { Config } from "@/config/config"
import { Service } from "./bootstrap-service"
export { Service } from "./bootstrap-service"
export type { Interface } from "./bootstrap-service"
export interface Interface {
readonly run: Effect.Effect<void>
}
export class Service extends Context.Service<Service, Interface>()("@opencode/InstanceBootstrap") {}
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
// Yield each bootstrap dep at layer init so `run` itself has R = never.
// InstanceStore imports only the lightweight tag from bootstrap-service.ts,
// so it can depend on bootstrap without importing this implementation graph.
// This breaks the circular declaration loop through Config → Instance → InstanceStore
// (instance-store.ts only yields this Service tag, never the impl-side services).
const bus = yield* Bus.Service
const config = yield* Config.Service
const file = yield* File.Service
const fileWatcher = yield* FileWatcher.Service
const format = yield* Format.Service
const lsp = yield* LSP.Service
const plugin = yield* Plugin.Service
const shareNext = yield* ShareNext.Service
const snapshot = yield* Snapshot.Service
const vcs = yield* Vcs.Service
@@ -39,8 +39,6 @@ export const layer = Layer.effect(
yield* Effect.logInfo("bootstrapping", { directory: ctx.directory })
// everything depends on config so eager load it for nice traces
yield* config.get()
// Plugin can mutate config so it has to be initialized before anything else.
yield* plugin.init()
yield* Effect.all(
[lsp, shareNext, format, file, fileWatcher, vcs, snapshot].map((s) => Effect.forkDetach(s.init())),
).pipe(Effect.withSpan("InstanceBootstrap.init"))
@@ -65,7 +63,6 @@ export const defaultLayer: Layer.Layer<Service> = layer.pipe(
FileWatcher.defaultLayer,
Format.defaultLayer,
LSP.defaultLayer,
Plugin.defaultLayer,
Project.defaultLayer,
ShareNext.defaultLayer,
Snapshot.defaultLayer,

View File

@@ -1,27 +0,0 @@
import { makeRuntime } from "@/effect/run-service"
import { type InstanceContext } from "./instance-context"
import { InstanceStore, type LoadInput } from "./instance-store"
import { Effect, Layer } from "effect"
// Bridge for Promise/ALS callers that cannot yet yield InstanceStore.Service.
// This keeps InstanceStore itself low-level while still giving legacy Hono and
// CLI paths the production bootstrap implementation. Delete this module once
// those callers are migrated to Effect boundaries that provide InstanceStore
// directly, like the HttpApi middleware does.
// Keep the bootstrap implementation import lazy: Instance is imported broadly,
// and importing the app bootstrap graph at module load can trigger ESM cycles.
export const layer = Layer.unwrap(
Effect.promise(async () => {
const { InstanceBootstrap } = await import("./bootstrap")
return InstanceStore.defaultLayer.pipe(Layer.provide(InstanceBootstrap.defaultLayer))
}),
)
const runtime = makeRuntime(InstanceStore.Service, layer)
export const load = (input: LoadInput) => runtime.runPromise((store) => store.load(input))
export const disposeInstance = (ctx: InstanceContext) => runtime.runPromise((store) => store.dispose(ctx))
export const disposeAllInstances = () => runtime.runPromise((store) => store.disposeAll())
export const reloadInstance = (input: LoadInput) => runtime.runPromise((store) => store.reload(input))
export * as InstanceRuntime from "./instance-runtime"

View File

@@ -2,10 +2,10 @@ import { GlobalBus } from "@/bus/global"
import { WorkspaceContext } from "@/control-plane/workspace-context"
import { InstanceRef } from "@/effect/instance-ref"
import { disposeInstance as runDisposers } from "@/effect/instance-registry"
import { makeRuntime } from "@/effect/run-service"
import { AppFileSystem } from "@opencode-ai/core/filesystem"
import { Context, Deferred, Duration, Effect, Exit, Layer, Scope } from "effect"
import { type InstanceContext } from "./instance-context"
import { InstanceBootstrap } from "./bootstrap-service"
import * as Project from "./project"
export interface LoadInput<R = never> {
@@ -36,11 +36,10 @@ interface Entry {
readonly deferred: Deferred.Deferred<InstanceContext>
}
export const layer: Layer.Layer<Service, never, Project.Service | InstanceBootstrap.Service> = Layer.effect(
export const layer: Layer.Layer<Service, never, Project.Service> = Layer.effect(
Service,
Effect.gen(function* () {
const project = yield* Project.Service
const bootstrap = yield* InstanceBootstrap.Service
const scope = yield* Scope.Scope
const cache = new Map<string, Entry>()
@@ -60,7 +59,6 @@ export const layer: Layer.Layer<Service, never, Project.Service | InstanceBootst
project: result.project,
})),
)
yield* bootstrap.run.pipe(Effect.provideService(InstanceRef, ctx))
if (input.init) yield* input.init.pipe(Effect.provideService(InstanceRef, ctx))
return ctx
}).pipe(Effect.withSpan("InstanceStore.boot"))
@@ -197,4 +195,13 @@ export const layer: Layer.Layer<Service, never, Project.Service | InstanceBootst
export const defaultLayer = layer.pipe(Layer.provide(Project.defaultLayer))
export const runtime = makeRuntime(Service, defaultLayer)
// Promise-returning helpers for callers without an Effect runtime in scope.
// They route through `runtime` (not a yielded Service from a fresh runtime)
// so they share the cache that `Instance.provide` populates.
export const disposeInstance = (ctx: InstanceContext) => runtime.runPromise((store) => store.dispose(ctx))
export const disposeAllInstances = () => runtime.runPromise((store) => store.disposeAll())
export const reloadInstance = (input: LoadInput) => runtime.runPromise((store) => store.reload(input))
export * as InstanceStore from "./instance-store"

View File

@@ -1,13 +1,15 @@
import { Effect } from "effect"
import { context, type InstanceContext } from "./instance-context"
import { InstanceRuntime } from "./instance-runtime"
import { InstanceStore } from "./instance-store"
export type { InstanceContext } from "./instance-context"
export type { LoadInput } from "./instance-store"
export const Instance = {
async provide<R>(input: { directory: string; init?: Effect.Effect<void>; fn: () => R }): Promise<R> {
const ctx = await InstanceRuntime.load({ directory: input.directory, init: input.init })
const ctx = await InstanceStore.runtime.runPromise((store) =>
store.load({ directory: input.directory, init: input.init }),
)
return context.provide(ctx, async () => input.fn())
},
get current() {

View File

@@ -1,25 +0,0 @@
import { GlobalBus } from "@/bus/global"
import { InstanceStore } from "@/project/instance-store"
import { Effect } from "effect"
import { Event } from "./event"
export const emitGlobalDisposed = Effect.sync(() =>
GlobalBus.emit("event", {
directory: "global",
payload: {
type: Event.Disposed.type,
properties: {},
},
}),
)
export const disposeAllInstancesAndEmitGlobalDisposed = Effect.fn(
"Server.disposeAllInstancesAndEmitGlobalDisposed",
)(function* (options?: { swallowErrors?: boolean }) {
const store = yield* InstanceStore.Service
const dispose = store.disposeAll()
yield* (options?.swallowErrors ? dispose.pipe(Effect.catch(() => Effect.void)) : dispose)
yield* emitGlobalDisposed
})
export * as GlobalLifecycle from "./global-lifecycle"

View File

@@ -1,23 +1,26 @@
import { Hono, type Context } from "hono"
import { describeRoute, resolver, validator } from "hono-openapi"
import { streamSSE } from "hono/streaming"
import { Effect } from "effect"
import { Effect, Schema } from "effect"
import z from "zod"
import { BusEvent } from "@/bus/bus-event"
import { SyncEvent } from "@/sync"
import { GlobalBus } from "@/bus/global"
import { Bus } from "@/bus"
import { AppRuntime } from "@/effect/app-runtime"
import { AsyncQueue } from "@/util/queue"
import { InstanceStore } from "../../project/instance-store"
import { Installation } from "@/installation"
import { InstallationVersion } from "@opencode-ai/core/installation/version"
import * as Log from "@opencode-ai/core/util/log"
import { lazy } from "../../util/lazy"
import { Config } from "@/config/config"
import { errors } from "../error"
import { disposeAllInstancesAndEmitGlobalDisposed } from "../global-lifecycle"
const log = Log.create({ service: "server" })
export const GlobalDisposedEvent = BusEvent.define("global.disposed", Schema.Struct({}))
async function streamEvents(c: Context, subscribe: (q: AsyncQueue<string | null>) => () => void) {
return streamSSE(c, async (stream) => {
const q = new AsyncQueue<string | null>()
@@ -26,6 +29,7 @@ async function streamEvents(c: Context, subscribe: (q: AsyncQueue<string | null>
q.push(
JSON.stringify({
payload: {
id: Bus.createID(),
type: "server.connected",
properties: {},
},
@@ -37,6 +41,7 @@ async function streamEvents(c: Context, subscribe: (q: AsyncQueue<string | null>
q.push(
JSON.stringify({
payload: {
id: Bus.createID(),
type: "server.heartbeat",
properties: {},
},
@@ -176,13 +181,8 @@ export const GlobalRoutes = lazy(() =>
validator("json", Config.Info.zod),
async (c) => {
const config = c.req.valid("json")
const result = await AppRuntime.runPromise(Config.Service.use((cfg) => cfg.updateGlobal(config)))
if (result.changed) {
void AppRuntime.runPromise(disposeAllInstancesAndEmitGlobalDisposed({ swallowErrors: true })).catch(
() => undefined,
)
}
return c.json(result.info)
const next = await AppRuntime.runPromise(Config.Service.use((cfg) => cfg.updateGlobal(config)))
return c.json(next)
},
)
.post(
@@ -203,7 +203,14 @@ export const GlobalRoutes = lazy(() =>
},
}),
async (c) => {
await AppRuntime.runPromise(disposeAllInstancesAndEmitGlobalDisposed())
await InstanceStore.disposeAllInstances()
GlobalBus.emit("event", {
directory: "global",
payload: {
type: GlobalDisposedEvent.type,
properties: {},
},
})
return c.json(true)
},
)

View File

@@ -2,8 +2,6 @@ import { Hono } from "hono"
import { describeRoute, validator, resolver } from "hono-openapi"
import z from "zod"
import { Config } from "@/config/config"
import { Instance } from "@/project/instance"
import { InstanceStore } from "@/project/instance-store"
import { Provider } from "@/provider/provider"
import { errors } from "../../error"
import { lazy } from "@/util/lazy"
@@ -57,9 +55,7 @@ export const ConfigRoutes = lazy(() =>
jsonRequest("ConfigRoutes.update", c, function* () {
const config = c.req.valid("json")
const cfg = yield* Config.Service
const store = yield* InstanceStore.Service
yield* cfg.update(config)
yield* store.dispose(Instance.current)
return config
}),
)

View File

@@ -42,6 +42,7 @@ export const EventRoutes = () =>
q.push(
JSON.stringify({
id: Bus.createID(),
type: "server.connected",
properties: {},
}),
@@ -50,9 +51,10 @@ export const EventRoutes = () =>
// Send heartbeat every 10s to prevent stalled proxy streams.
const heartbeat = setInterval(() => {
q.push(
JSON.stringify({
type: "server.heartbeat",
properties: {},
JSON.stringify({
id: Bus.createID(),
type: "server.heartbeat",
properties: {},
}),
)
}, 10_000)

View File

@@ -19,6 +19,7 @@ import { SessionApi } from "./groups/session"
import { SyncApi } from "./groups/sync"
import { TuiApi } from "./groups/tui"
import { WorkspaceApi } from "./groups/workspace"
import { V2Api } from "./groups/v2"
// SSE event schemas built from the same BusEvent/SyncEvent registries that
// the Hono spec uses, so both specs emit identical Event/SyncEvent components.
@@ -40,6 +41,7 @@ export const InstanceHttpApi = HttpApi.make("opencode-instance")
.addHttpApi(ProviderApi)
.addHttpApi(SessionApi)
.addHttpApi(SyncApi)
.addHttpApi(V2Api)
.addHttpApi(TuiApi)
.addHttpApi(WorkspaceApi)

View File

@@ -41,12 +41,12 @@ function eventResponse(bus: Bus.Interface) {
const events = bus.subscribeAll().pipe(Stream.takeUntil((event) => event.type === Bus.InstanceDisposed.type))
const heartbeat = Stream.tick("10 seconds").pipe(
Stream.drop(1),
Stream.map(() => ({ type: "server.heartbeat", properties: {} })),
Stream.map(() => ({ id: Bus.createID(), type: "server.heartbeat", properties: {} })),
)
log.info("event connected")
return HttpServerResponse.stream(
Stream.make({ type: "server.connected", properties: {} }).pipe(
Stream.make({ id: Bus.createID(), type: "server.connected", properties: {} }).pipe(
Stream.concat(events.pipe(Stream.merge(heartbeat, { haltStrategy: "left" }))),
Stream.map(eventData),
Stream.pipeThroughChannel(Sse.encode()),

View File

@@ -1,7 +1,6 @@
import { Config } from "@/config/config"
import { BusEvent } from "@/bus/bus-event"
import { SyncEvent } from "@/sync"
import "@/server/event"
import { Schema } from "effect"
import { HttpApi, HttpApiEndpoint, HttpApiError, HttpApiGroup, OpenApi } from "effect/unstable/httpapi"
import { described } from "./metadata"

View File

@@ -0,0 +1,14 @@
import { HttpApi, OpenApi } from "effect/unstable/httpapi"
import { MessageGroup } from "./v2/message"
import { SessionGroup } from "./v2/session"

// OpenAPI metadata merged onto the whole experimental v2 spec.
const v2Annotations = OpenApi.annotations({
  title: "opencode experimental HttpApi",
  version: "0.0.1",
  description: "Experimental HttpApi surface for selected instance routes.",
})

// Experimental v2 API: session routes first, then message routes, annotated as one spec.
export const V2Api = HttpApi.make("v2").add(SessionGroup).add(MessageGroup).annotateMerge(v2Annotations)

View File

@@ -0,0 +1,69 @@
import { SessionID } from "@/session/schema"
import { SessionMessage } from "@/v2/session-message"
import { Schema } from "effect"
import { HttpApiEndpoint, HttpApiError, HttpApiGroup, OpenApi } from "effect/unstable/httpapi"
import { Authorization } from "../../middleware/authorization"

// Shared `limit` query schema. Both union branches accept identical bounds
// (integer, 1..200), so declare the check chain and description once instead
// of duplicating them verbatim in each branch. Schema values are immutable,
// so reuse across structs is safe.
const MessagesLimit = Schema.optional(
  Schema.NumberFromString.check(Schema.isInt(), Schema.isGreaterThanOrEqualTo(1), Schema.isLessThanOrEqualTo(200)),
).annotate({
  description: "Maximum number of messages to return. When omitted, the endpoint returns its default page size.",
})

// Experimental v2 message routes. The query is a two-branch union that makes
// `order` and `cursor` mutually exclusive: the first page may pass an order,
// follow-up pages must pass the opaque cursor from the prior response.
export const MessageGroup = HttpApiGroup.make("v2.message")
  .add(
    HttpApiEndpoint.get("messages", "/api/session/:sessionID/message", {
      params: { sessionID: SessionID },
      query: Schema.Union([
        // First page: optional order, cursor forbidden (Schema.Never).
        Schema.Struct({
          limit: MessagesLimit,
          order: Schema.optional(Schema.Union([Schema.Literal("asc"), Schema.Literal("desc")])).annotate({
            description: "Message order for the first page. Use desc for newest first or asc for oldest first.",
          }),
          cursor: Schema.optional(Schema.Never),
        }),
        // Follow-up page: cursor required, order forbidden (it travels inside the cursor).
        Schema.Struct({
          limit: MessagesLimit,
          cursor: Schema.String.annotate({
            description:
              "Opaque pagination cursor returned as cursor.previous or cursor.next in the previous response. Do not combine with order.",
          }),
          order: Schema.optional(Schema.Never),
        }),
      ]).annotate({ identifier: "V2SessionMessagesQuery" }),
      success: Schema.Struct({
        items: Schema.Array(SessionMessage.Message),
        cursor: Schema.Struct({
          previous: Schema.String.pipe(Schema.optional),
          next: Schema.String.pipe(Schema.optional),
        }),
      }).annotate({ identifier: "V2SessionMessagesResponse" }),
      error: HttpApiError.BadRequest,
    }).annotateMerge(
      OpenApi.annotations({
        identifier: "v2.session.messages",
        summary: "Get v2 session messages",
        description:
          "Retrieve projected v2 messages for a session. Items keep the requested order across pages; use cursor.next or cursor.previous to move through the ordered timeline.",
      }),
    ),
  )
  .annotateMerge(
    OpenApi.annotations({
      title: "v2 messages",
      description: "Experimental v2 message routes.",
    }),
  )
  .middleware(Authorization)

View File

@@ -0,0 +1,128 @@
import { WorkspaceID } from "@/control-plane/schema"
import { SessionID } from "@/session/schema"
import { SessionMessage } from "@/v2/session-message"
import { Prompt } from "@/v2/session-prompt"
import { SessionV2 } from "@/v2/session"
import { Schema, SchemaGetter } from "effect"
import { HttpApiEndpoint, HttpApiError, HttpApiGroup, HttpApiSchema, OpenApi } from "effect/unstable/httpapi"
import { Authorization } from "../../middleware/authorization"

// Experimental v2 session routes: list, prompt, compact, wait.
// The list query is a two-branch union making filters/order and cursor mutually
// exclusive — the cursor is expected to carry the filters forward (see handler).
export const SessionGroup = HttpApiGroup.make("v2.session")
  .add(
    HttpApiEndpoint.get("sessions", "/api/session", {
      query: Schema.Union([
        // First page: filters and an optional order; cursor forbidden (Schema.Never).
        Schema.Struct({
          limit: Schema.optional(
            Schema.NumberFromString.check(
              Schema.isInt(),
              Schema.isGreaterThanOrEqualTo(1),
              Schema.isLessThanOrEqualTo(200),
            ),
          ).annotate({
            description: "Maximum number of sessions to return. Defaults to the newest 50 sessions.",
          }),
          order: Schema.optional(Schema.Union([Schema.Literal("asc"), Schema.Literal("desc")])).annotate({
            description: "Session order for the first page. Use desc for newest first or asc for oldest first.",
          }),
          directory: Schema.String.pipe(Schema.optional),
          path: Schema.String.pipe(Schema.optional),
          workspace: WorkspaceID.pipe(Schema.optional),
          // Query strings carry "true"/"false" literals; decode to a real boolean.
          roots: Schema.Literals(["true", "false"])
            .pipe(
              Schema.decodeTo(Schema.Boolean, {
                decode: SchemaGetter.transform((value) => value === "true"),
                encode: SchemaGetter.transform((value) => (value ? "true" : "false")),
              }),
            )
            .pipe(Schema.optional),
          start: Schema.NumberFromString.pipe(Schema.optional),
          search: Schema.String.pipe(Schema.optional),
          cursor: Schema.optional(Schema.Never),
        }),
        // Follow-up page: cursor only; every filter/order key is forbidden so
        // callers cannot silently combine stale filters with a cursor.
        Schema.Struct({
          limit: Schema.optional(
            Schema.NumberFromString.check(
              Schema.isInt(),
              Schema.isGreaterThanOrEqualTo(1),
              Schema.isLessThanOrEqualTo(200),
            ),
          ).annotate({
            description: "Maximum number of sessions to return. Defaults to the newest 50 sessions.",
          }),
          cursor: Schema.String.annotate({
            description:
              "Opaque pagination cursor returned as cursor.previous or cursor.next in the previous response. Do not combine with order.",
          }),
          order: Schema.optional(Schema.Never),
          directory: Schema.optional(Schema.Never),
          path: Schema.optional(Schema.Never),
          workspace: Schema.optional(Schema.Never),
          roots: Schema.optional(Schema.Never),
          start: Schema.optional(Schema.Never),
          search: Schema.optional(Schema.Never),
        }),
      ]).annotate({ identifier: "V2SessionsQuery" }),
      success: Schema.Struct({
        items: Schema.Array(SessionV2.Info),
        cursor: Schema.Struct({
          previous: Schema.String.pipe(Schema.optional),
          next: Schema.String.pipe(Schema.optional),
        }),
      }).annotate({ identifier: "V2SessionsResponse" }),
      error: HttpApiError.BadRequest,
    }).annotateMerge(
      OpenApi.annotations({
        identifier: "v2.session.list",
        summary: "List v2 sessions",
        description:
          "Retrieve sessions in the requested order. Items keep that order across pages; use cursor.next or cursor.previous to move through the ordered list.",
      }),
    ),
  )
  .add(
    // Create a message and queue it for the agent loop; delivery is optional.
    HttpApiEndpoint.post("prompt", "/api/session/:sessionID/prompt", {
      params: { sessionID: SessionID },
      payload: Schema.Struct({
        prompt: Prompt,
        delivery: SessionV2.Delivery.pipe(Schema.optional),
      }),
      success: SessionMessage.Message,
    }).annotateMerge(
      OpenApi.annotations({
        identifier: "v2.session.prompt",
        summary: "Send v2 message",
        description: "Create a v2 session message and queue it for the agent loop.",
      }),
    ),
  )
  .add(
    // Fire-and-return compaction; responds 204 with no body.
    HttpApiEndpoint.post("compact", "/api/session/:sessionID/compact", {
      params: { sessionID: SessionID },
      success: HttpApiSchema.NoContent,
    }).annotateMerge(
      OpenApi.annotations({
        identifier: "v2.session.compact",
        summary: "Compact v2 session",
        description: "Compact a v2 session conversation.",
      }),
    ),
  )
  .add(
    // Blocks until the session's agent loop goes idle; responds 204.
    HttpApiEndpoint.post("wait", "/api/session/:sessionID/wait", {
      params: { sessionID: SessionID },
      success: HttpApiSchema.NoContent,
    }).annotateMerge(
      OpenApi.annotations({
        identifier: "v2.session.wait",
        summary: "Wait for v2 session",
        description: "Wait for a v2 session agent loop to become idle.",
      }),
    ),
  )
  .annotateMerge(
    OpenApi.annotations({
      title: "v2",
      description: "Experimental v2 routes.",
    }),
  )
  // All v2 session routes require authorization.
  .middleware(Authorization)

View File

@@ -16,7 +16,7 @@ export const configHandlers = HttpApiBuilder.group(InstanceHttpApi, "config", (h
})
const update = Effect.fn("ConfigHttpApi.update")(function* (ctx) {
yield* configSvc.update(ctx.payload)
yield* configSvc.update(ctx.payload, { dispose: false })
yield* markInstanceForDisposal(yield* InstanceState.context)
return ctx.payload
})

View File

@@ -1,8 +1,8 @@
import { Config } from "@/config/config"
import { GlobalBus, type GlobalEvent as GlobalBusEvent } from "@/bus/global"
import { EffectBridge } from "@/effect/bridge"
import { Bus } from "@/bus"
import { Installation } from "@/installation"
import { disposeAllInstancesAndEmitGlobalDisposed } from "@/server/global-lifecycle"
import { InstanceStore } from "@/project/instance-store"
import { InstallationVersion } from "@opencode-ai/core/installation/version"
import * as Log from "@opencode-ai/core/util/log"
import { Effect, Queue, Schema } from "effect"
@@ -43,11 +43,11 @@ function eventResponse() {
})
const heartbeat = Stream.tick("10 seconds").pipe(
Stream.drop(1),
Stream.map(() => ({ payload: { type: "server.heartbeat", properties: {} } })),
Stream.map(() => ({ payload: { id: Bus.createID(), type: "server.heartbeat", properties: {} } })),
)
return HttpServerResponse.stream(
Stream.make({ payload: { type: "server.connected", properties: {} } }).pipe(
Stream.make({ payload: { id: Bus.createID(), type: "server.connected", properties: {} } }).pipe(
Stream.concat(events.pipe(Stream.merge(heartbeat, { haltStrategy: "left" }))),
Stream.map(eventData),
Stream.pipeThroughChannel(Sse.encode()),
@@ -69,7 +69,7 @@ export const globalHandlers = HttpApiBuilder.group(RootHttpApi, "global", (handl
Effect.gen(function* () {
const config = yield* Config.Service
const installation = yield* Installation.Service
const bridge = yield* EffectBridge.make()
const store = yield* InstanceStore.Service
const health = Effect.fn("GlobalHttpApi.health")(function* () {
return { healthy: true as const, version: InstallationVersion }
@@ -84,13 +84,15 @@ export const globalHandlers = HttpApiBuilder.group(RootHttpApi, "global", (handl
})
const configUpdate = Effect.fn("GlobalHttpApi.configUpdate")(function* (ctx) {
const result = yield* config.updateGlobal(ctx.payload)
if (result.changed) bridge.fork(disposeAllInstancesAndEmitGlobalDisposed({ swallowErrors: true }))
return result.info
return yield* config.updateGlobal(ctx.payload)
})
const dispose = Effect.fn("GlobalHttpApi.dispose")(function* () {
yield* disposeAllInstancesAndEmitGlobalDisposed()
yield* store.disposeAll()
GlobalBus.emit("event", {
directory: "global",
payload: { type: "global.disposed", properties: {} },
})
return true
})

View File

@@ -0,0 +1,6 @@
import { SessionV2 } from "@/v2/session"
import { Layer } from "effect"
import { messageHandlers } from "./v2/message"
import { sessionHandlers } from "./v2/session"

// Merge the v2 session and message handler groups into one layer, and satisfy
// their shared SessionV2.Service requirement with its default implementation
// so consumers only need to mount this single layer.
export const v2Handlers = Layer.mergeAll(sessionHandlers, messageHandlers).pipe(Layer.provide(SessionV2.defaultLayer))

View File

@@ -0,0 +1,60 @@
import { SessionMessage } from "@/v2/session-message"
import { SessionV2 } from "@/v2/session"
import { Effect, Schema } from "effect"
import * as DateTime from "effect/DateTime"
import { HttpApiBuilder, HttpApiError } from "effect/unstable/httpapi"
import { InstanceHttpApi } from "../../api"

// Page size used when the request omits `limit`.
const DefaultMessagesLimit = 50

// Decoded shape of the opaque pagination cursor: the boundary message's id and
// creation time plus the order/direction the caller was paging in.
const Cursor = Schema.Struct({
  id: SessionMessage.ID,
  time: Schema.Number,
  order: Schema.Union([Schema.Literal("asc"), Schema.Literal("desc")]),
  direction: Schema.Union([Schema.Literal("previous"), Schema.Literal("next")]),
})
const decodeCursor = Schema.decodeUnknownSync(Cursor)

// Cursor codec: JSON payload wrapped in base64url so it survives query strings.
// `decode` throws on malformed input (JSON.parse or schema decode); callers are
// expected to translate that into a BadRequest (see handler below).
const cursor = {
  encode(message: SessionMessage.Message, order: "asc" | "desc", direction: "previous" | "next") {
    return Buffer.from(
      JSON.stringify({ id: message.id, time: DateTime.toEpochMillis(message.time.created), order, direction }),
    ).toString("base64url")
  },
  decode(input: string) {
    return decodeCursor(JSON.parse(Buffer.from(input, "base64url").toString("utf8")))
  },
}

// Handlers for the v2.message group: a single cursor-paginated `messages` route.
export const messageHandlers = HttpApiBuilder.group(InstanceHttpApi, "v2.message", (handlers) =>
  Effect.gen(function* () {
    const session = yield* SessionV2.Service
    return handlers.handle(
      "messages",
      Effect.fn(function* (ctx) {
        // A malformed cursor maps to 400 rather than a defect.
        const decoded = yield* Effect.try({
          try: () => (ctx.query.cursor ? cursor.decode(ctx.query.cursor) : undefined),
          catch: () => new HttpApiError.BadRequest({}),
        })
        // Cursor order wins over query order; default is newest-first.
        const order = decoded?.order ?? ctx.query.order ?? "desc"
        const messages = yield* session.messages({
          sessionID: ctx.params.sessionID,
          limit: ctx.query.limit ?? DefaultMessagesLimit,
          order,
          cursor: decoded ? { id: decoded.id, time: decoded.time, direction: decoded.direction } : undefined,
        })
        const first = messages[0]
        const last = messages.at(-1)
        // NOTE(review): `previous` is emitted even on the very first page (it
        // points before the first item); presumably paging past the start just
        // returns an empty page — confirm against SessionV2.messages.
        return {
          items: messages,
          cursor: {
            previous: first ? cursor.encode(first, order, "previous") : undefined,
            next: last ? cursor.encode(last, order, "next") : undefined,
          },
        }
      }),
    )
  }),
)

View File

@@ -0,0 +1,109 @@
import { WorkspaceID } from "@/control-plane/schema"
import { SessionV2 } from "@/v2/session"
import { Effect, Schema } from "effect"
import { HttpApiBuilder, HttpApiError, HttpApiSchema } from "effect/unstable/httpapi"
import { InstanceHttpApi } from "../../api"

// Page size used when the request omits `limit`.
const DefaultSessionsLimit = 50

// Decoded shape of the opaque session-list cursor: the boundary session's
// id/time, the paging order/direction, and every list filter — filters ride
// inside the cursor so follow-up pages reuse them without re-sending query params.
const SessionCursor = Schema.Struct({
  id: SessionV2.Info.fields.id,
  time: Schema.Number,
  order: Schema.Union([Schema.Literal("asc"), Schema.Literal("desc")]),
  direction: Schema.Union([Schema.Literal("previous"), Schema.Literal("next")]),
  directory: Schema.String.pipe(Schema.optional),
  path: Schema.String.pipe(Schema.optional),
  workspaceID: WorkspaceID.pipe(Schema.optional),
  roots: Schema.Boolean.pipe(Schema.optional),
  start: Schema.Number.pipe(Schema.optional),
  search: Schema.String.pipe(Schema.optional),
})
type SessionCursor = typeof SessionCursor.Type
const decodeCursor = Schema.decodeUnknownSync(SessionCursor)

// Cursor codec: JSON wrapped in base64url. `decode` throws on malformed input;
// the handler maps that to BadRequest.
const sessionCursor = {
  encode(
    session: SessionV2.Info,
    order: "asc" | "desc",
    direction: "previous" | "next",
    filters: Pick<SessionCursor, "directory" | "path" | "workspaceID" | "roots" | "start" | "search">,
  ) {
    return Buffer.from(
      JSON.stringify({ id: session.id, time: session.time.created, order, direction, ...filters }),
    ).toString("base64url")
  },
  decode(input: string) {
    return decodeCursor(JSON.parse(Buffer.from(input, "base64url").toString("utf8")))
  },
}

// Handlers for the v2.session group: list (cursor-paginated), prompt, compact, wait.
export const sessionHandlers = HttpApiBuilder.group(InstanceHttpApi, "v2.session", (handlers) =>
  Effect.gen(function* () {
    const session = yield* SessionV2.Service
    return handlers
      .handle(
        "sessions",
        Effect.fn(function* (ctx) {
          // A malformed cursor maps to 400 rather than a defect.
          const decoded = yield* Effect.try({
            try: () => (ctx.query.cursor ? sessionCursor.decode(ctx.query.cursor) : undefined),
            catch: () => new HttpApiError.BadRequest({}),
          })
          // Cursor order wins over query order; default is newest-first.
          const order = decoded?.order ?? ctx.query.order ?? "desc"
          // On cursor pages the filters come from the cursor itself; the route
          // schema forbids filter query params alongside a cursor.
          const filters = decoded ?? {
            directory: ctx.query.directory,
            path: ctx.query.path,
            workspaceID: ctx.query.workspace ? WorkspaceID.make(ctx.query.workspace) : undefined,
            roots: ctx.query.roots,
            start: ctx.query.start,
            search: ctx.query.search,
          }
          const sessions = yield* session.list({
            limit: ctx.query.limit ?? DefaultSessionsLimit,
            order,
            directory: filters.directory,
            path: filters.path,
            workspaceID: filters.workspaceID,
            roots: filters.roots,
            start: filters.start,
            search: filters.search,
            cursor: decoded ? { id: decoded.id, time: decoded.time, direction: decoded.direction } : undefined,
          })
          const first = sessions[0]
          const last = sessions.at(-1)
          // Boundary items become the previous/next cursors; filters are
          // re-embedded so the next page repeats the same query.
          return {
            items: sessions,
            cursor: {
              previous: first ? sessionCursor.encode(first, order, "previous", filters) : undefined,
              next: last ? sessionCursor.encode(last, order, "next", filters) : undefined,
            },
          }
        }),
      )
      .handle(
        "prompt",
        Effect.fn(function* (ctx) {
          // Queue the prompt for the agent loop; falls back to the default delivery.
          return yield* session.prompt({
            sessionID: ctx.params.sessionID,
            prompt: ctx.payload.prompt,
            delivery: ctx.payload.delivery ?? SessionV2.DefaultDelivery,
          })
        }),
      )
      .handle(
        "compact",
        Effect.fn(function* (ctx) {
          yield* session.compact(ctx.params.sessionID)
          // 204: the route declares NoContent success.
          return HttpApiSchema.NoContent.make()
        }),
      )
      .handle(
        "wait",
        Effect.fn(function* (ctx) {
          // Blocks until the session's agent loop is idle, then 204.
          yield* session.wait(ctx.params.sessionID)
          return HttpApiSchema.NoContent.make()
        }),
      )
  }),
)

View File

@@ -1,4 +1,5 @@
import { WorkspaceRef } from "@/effect/instance-ref"
import { InstanceBootstrap } from "@/project/bootstrap"
import { InstanceStore } from "@/project/instance-store"
import { Effect, Layer } from "effect"
import { HttpRouter, HttpServerResponse } from "effect/unstable/http"
@@ -23,11 +24,12 @@ function decode(input: string): string {
function provideInstanceContext<E>(
effect: Effect.Effect<HttpServerResponse.HttpServerResponse, E>,
store: InstanceStore.Interface,
bootstrap: InstanceBootstrap.Interface,
): Effect.Effect<HttpServerResponse.HttpServerResponse, E, WorkspaceRouteContext> {
return Effect.gen(function* () {
const route = yield* WorkspaceRouteContext
return yield* store.provide(
{ directory: decode(route.directory) },
{ directory: decode(route.directory), init: bootstrap.run },
effect.pipe(Effect.provideService(WorkspaceRef, route.workspaceID)),
)
})
@@ -37,13 +39,15 @@ export const instanceContextLayer = Layer.effect(
InstanceContextMiddleware,
Effect.gen(function* () {
const store = yield* InstanceStore.Service
return InstanceContextMiddleware.of((effect) => provideInstanceContext(effect, store))
const bootstrap = yield* InstanceBootstrap.Service
return InstanceContextMiddleware.of((effect) => provideInstanceContext(effect, store, bootstrap))
}),
)
export const instanceRouterMiddleware = HttpRouter.middleware()(
Effect.gen(function* () {
const store = yield* InstanceStore.Service
return (effect) => provideInstanceContext(effect, store)
const bootstrap = yield* InstanceBootstrap.Service
return (effect) => provideInstanceContext(effect, store, bootstrap)
}),
)

View File

@@ -18,7 +18,8 @@ import { LSP } from "@/lsp/lsp"
import { MCP } from "@/mcp"
import { Permission } from "@/permission"
import { Installation } from "@/installation"
import { InstanceRuntime } from "@/project/instance-runtime"
import { InstanceBootstrap } from "@/project/bootstrap"
import { InstanceStore } from "@/project/instance-store"
import { Plugin } from "@/plugin"
import { Project } from "@/project/project"
import { ProviderAuth } from "@/provider/auth"
@@ -64,6 +65,7 @@ import { questionHandlers } from "./handlers/question"
import { sessionHandlers } from "./handlers/session"
import { syncHandlers } from "./handlers/sync"
import { tuiHandlers } from "./handlers/tui"
import { v2Handlers } from "./handlers/v2"
import { workspaceHandlers } from "./handlers/workspace"
import { instanceContextLayer, instanceRouterMiddleware } from "./middleware/instance-context"
import { workspaceRouterMiddleware, workspaceRoutingLayer } from "./middleware/workspace-routing"
@@ -115,6 +117,7 @@ const instanceApiRoutes = HttpApiBuilder.layer(InstanceHttpApi).pipe(
providerHandlers,
sessionHandlers,
syncHandlers,
v2Handlers,
tuiHandlers,
workspaceHandlers,
]),
@@ -152,7 +155,8 @@ export function createRoutes(corsOptions?: CorsOptions) {
Format.defaultLayer,
LSP.defaultLayer,
Installation.defaultLayer,
InstanceRuntime.layer,
InstanceBootstrap.defaultLayer,
InstanceStore.defaultLayer,
MCP.defaultLayer,
ModelsDev.defaultLayer,
Permission.defaultLayer,

View File

@@ -1,12 +1,13 @@
import { describeRoute, resolver, validator } from "hono-openapi"
import { Hono } from "hono"
import type { UpgradeWebSocket } from "hono/ws"
import { Effect } from "effect"
import { Context, Effect } from "effect"
import { Flag } from "@opencode-ai/core/flag/flag"
import z from "zod"
import { Format } from "@/format"
import { TuiRoutes } from "./tui"
import { Instance } from "@/project/instance"
import { InstanceRuntime } from "@/project/instance-runtime"
import { InstanceStore } from "@/project/instance-store"
import { Vcs } from "@/project/vcs"
import { Agent } from "@/agent/agent"
import { Skill } from "@/skill"
@@ -25,12 +26,136 @@ import { ExperimentalRoutes } from "./experimental"
import { ProviderRoutes } from "./provider"
import { EventRoutes } from "./event"
import { SyncRoutes } from "./sync"
import { V2Routes } from "./v2"
import { InstanceMiddleware } from "./middleware"
import { jsonRequest } from "./trace"
import { ExperimentalHttpApiServer } from "./httpapi/server"
import { EventPaths } from "./httpapi/event"
import { ExperimentalPaths } from "./httpapi/groups/experimental"
import { FilePaths } from "./httpapi/groups/file"
import { InstancePaths } from "./httpapi/groups/instance"
import { McpPaths } from "./httpapi/groups/mcp"
import { PtyPaths } from "./httpapi/groups/pty"
import { SessionPaths } from "./httpapi/groups/session"
import { SyncPaths } from "./httpapi/groups/sync"
import { TuiPaths } from "./httpapi/groups/tui"
import { WorkspacePaths } from "./httpapi/groups/workspace"
export const InstanceRoutes = (upgrade: UpgradeWebSocket): Hono => {
const app = new Hono()
if (Flag.OPENCODE_EXPERIMENTAL_HTTPAPI) {
const handler = ExperimentalHttpApiServer.webHandler().handler
const context = Context.empty() as Context.Context<unknown>
app.all("/api/*", (c) => handler(c.req.raw, context))
app.get(EventPaths.event, (c) => handler(c.req.raw, context))
app.get("/question", (c) => handler(c.req.raw, context))
app.post("/question/:requestID/reply", (c) => handler(c.req.raw, context))
app.post("/question/:requestID/reject", (c) => handler(c.req.raw, context))
app.get("/permission", (c) => handler(c.req.raw, context))
app.post("/permission/:requestID/reply", (c) => handler(c.req.raw, context))
app.get("/config", (c) => handler(c.req.raw, context))
app.patch("/config", (c) => handler(c.req.raw, context))
app.get("/config/providers", (c) => handler(c.req.raw, context))
app.get(ExperimentalPaths.console, (c) => handler(c.req.raw, context))
app.get(ExperimentalPaths.consoleOrgs, (c) => handler(c.req.raw, context))
app.post(ExperimentalPaths.consoleSwitch, (c) => handler(c.req.raw, context))
app.get(ExperimentalPaths.tool, (c) => handler(c.req.raw, context))
app.get(ExperimentalPaths.toolIDs, (c) => handler(c.req.raw, context))
app.get(ExperimentalPaths.worktree, (c) => handler(c.req.raw, context))
app.post(ExperimentalPaths.worktree, (c) => handler(c.req.raw, context))
app.delete(ExperimentalPaths.worktree, (c) => handler(c.req.raw, context))
app.post(ExperimentalPaths.worktreeReset, (c) => handler(c.req.raw, context))
app.get(ExperimentalPaths.session, (c) => handler(c.req.raw, context))
app.get(ExperimentalPaths.resource, (c) => handler(c.req.raw, context))
app.get("/provider", (c) => handler(c.req.raw, context))
app.get("/provider/auth", (c) => handler(c.req.raw, context))
app.post("/provider/:providerID/oauth/authorize", (c) => handler(c.req.raw, context))
app.post("/provider/:providerID/oauth/callback", (c) => handler(c.req.raw, context))
app.get("/project", (c) => handler(c.req.raw, context))
app.get("/project/current", (c) => handler(c.req.raw, context))
app.post("/project/git/init", (c) => handler(c.req.raw, context))
app.patch("/project/:projectID", (c) => handler(c.req.raw, context))
app.get(FilePaths.findText, (c) => handler(c.req.raw, context))
app.get(FilePaths.findFile, (c) => handler(c.req.raw, context))
app.get(FilePaths.findSymbol, (c) => handler(c.req.raw, context))
app.get(FilePaths.list, (c) => handler(c.req.raw, context))
app.get(FilePaths.content, (c) => handler(c.req.raw, context))
app.get(FilePaths.status, (c) => handler(c.req.raw, context))
app.get(InstancePaths.path, (c) => handler(c.req.raw, context))
app.post(InstancePaths.dispose, (c) => handler(c.req.raw, context))
app.get(InstancePaths.vcs, (c) => handler(c.req.raw, context))
app.get(InstancePaths.vcsDiff, (c) => handler(c.req.raw, context))
app.get(InstancePaths.command, (c) => handler(c.req.raw, context))
app.get(InstancePaths.agent, (c) => handler(c.req.raw, context))
app.get(InstancePaths.skill, (c) => handler(c.req.raw, context))
app.get(InstancePaths.lsp, (c) => handler(c.req.raw, context))
app.get(InstancePaths.formatter, (c) => handler(c.req.raw, context))
app.get(McpPaths.status, (c) => handler(c.req.raw, context))
app.post(McpPaths.status, (c) => handler(c.req.raw, context))
app.post(McpPaths.auth, (c) => handler(c.req.raw, context))
app.post(McpPaths.authCallback, (c) => handler(c.req.raw, context))
app.post(McpPaths.authAuthenticate, (c) => handler(c.req.raw, context))
app.delete(McpPaths.auth, (c) => handler(c.req.raw, context))
app.post(McpPaths.connect, (c) => handler(c.req.raw, context))
app.post(McpPaths.disconnect, (c) => handler(c.req.raw, context))
app.post(SyncPaths.start, (c) => handler(c.req.raw, context))
app.post(SyncPaths.replay, (c) => handler(c.req.raw, context))
app.post(SyncPaths.history, (c) => handler(c.req.raw, context))
app.get(PtyPaths.list, (c) => handler(c.req.raw, context))
app.post(PtyPaths.create, (c) => handler(c.req.raw, context))
app.get(PtyPaths.get, (c) => handler(c.req.raw, context))
app.put(PtyPaths.update, (c) => handler(c.req.raw, context))
app.delete(PtyPaths.remove, (c) => handler(c.req.raw, context))
app.get(PtyPaths.connect, (c) => handler(c.req.raw, context))
app.get(SessionPaths.list, (c) => handler(c.req.raw, context))
app.get(SessionPaths.status, (c) => handler(c.req.raw, context))
app.get(SessionPaths.get, (c) => handler(c.req.raw, context))
app.get(SessionPaths.children, (c) => handler(c.req.raw, context))
app.get(SessionPaths.todo, (c) => handler(c.req.raw, context))
app.get(SessionPaths.diff, (c) => handler(c.req.raw, context))
app.get(SessionPaths.messages, (c) => handler(c.req.raw, context))
app.get(SessionPaths.message, (c) => handler(c.req.raw, context))
app.post(SessionPaths.create, (c) => handler(c.req.raw, context))
app.delete(SessionPaths.remove, (c) => handler(c.req.raw, context))
app.patch(SessionPaths.update, (c) => handler(c.req.raw, context))
app.post(SessionPaths.init, (c) => handler(c.req.raw, context))
app.post(SessionPaths.fork, (c) => handler(c.req.raw, context))
app.post(SessionPaths.abort, (c) => handler(c.req.raw, context))
app.post(SessionPaths.share, (c) => handler(c.req.raw, context))
app.delete(SessionPaths.share, (c) => handler(c.req.raw, context))
app.post(SessionPaths.summarize, (c) => handler(c.req.raw, context))
app.post(SessionPaths.prompt, (c) => handler(c.req.raw, context))
app.post(SessionPaths.promptAsync, (c) => handler(c.req.raw, context))
app.post(SessionPaths.command, (c) => handler(c.req.raw, context))
app.post(SessionPaths.shell, (c) => handler(c.req.raw, context))
app.post(SessionPaths.revert, (c) => handler(c.req.raw, context))
app.post(SessionPaths.unrevert, (c) => handler(c.req.raw, context))
app.post(SessionPaths.permissions, (c) => handler(c.req.raw, context))
app.delete(SessionPaths.deleteMessage, (c) => handler(c.req.raw, context))
app.delete(SessionPaths.deletePart, (c) => handler(c.req.raw, context))
app.patch(SessionPaths.updatePart, (c) => handler(c.req.raw, context))
app.post(TuiPaths.appendPrompt, (c) => handler(c.req.raw, context))
app.post(TuiPaths.openHelp, (c) => handler(c.req.raw, context))
app.post(TuiPaths.openSessions, (c) => handler(c.req.raw, context))
app.post(TuiPaths.openThemes, (c) => handler(c.req.raw, context))
app.post(TuiPaths.openModels, (c) => handler(c.req.raw, context))
app.post(TuiPaths.submitPrompt, (c) => handler(c.req.raw, context))
app.post(TuiPaths.clearPrompt, (c) => handler(c.req.raw, context))
app.post(TuiPaths.executeCommand, (c) => handler(c.req.raw, context))
app.post(TuiPaths.showToast, (c) => handler(c.req.raw, context))
app.post(TuiPaths.publish, (c) => handler(c.req.raw, context))
app.post(TuiPaths.selectSession, (c) => handler(c.req.raw, context))
app.get(TuiPaths.controlNext, (c) => handler(c.req.raw, context))
app.post(TuiPaths.controlResponse, (c) => handler(c.req.raw, context))
app.get(WorkspacePaths.adapters, (c) => handler(c.req.raw, context))
app.post(WorkspacePaths.list, (c) => handler(c.req.raw, context))
app.get(WorkspacePaths.list, (c) => handler(c.req.raw, context))
app.get(WorkspacePaths.status, (c) => handler(c.req.raw, context))
app.delete(WorkspacePaths.remove, (c) => handler(c.req.raw, context))
app.post(WorkspacePaths.sessionRestore, (c) => handler(c.req.raw, context))
}
return app
.route("/project", ProjectRoutes())
.route("/pty", PtyRoutes(upgrade))
@@ -41,6 +166,7 @@ export const InstanceRoutes = (upgrade: UpgradeWebSocket): Hono => {
.route("/question", QuestionRoutes())
.route("/provider", ProviderRoutes())
.route("/sync", SyncRoutes())
.route("/api", V2Routes())
.route("/", FileRoutes())
.route("/", EventRoutes())
.route("/mcp", McpRoutes())
@@ -63,7 +189,7 @@ export const InstanceRoutes = (upgrade: UpgradeWebSocket): Hono => {
},
}),
async (c) => {
await InstanceRuntime.disposeInstance(Instance.current)
await InstanceStore.disposeInstance(Instance.current)
return c.json(true)
},
)

View File

@@ -1,5 +1,6 @@
import type { MiddlewareHandler } from "hono"
import { Instance } from "@/project/instance"
import { getBootstrapRunEffect } from "@/effect/app-runtime"
import { AppFileSystem } from "@opencode-ai/core/filesystem"
import { WorkspaceContext } from "@/control-plane/workspace-context"
import { WorkspaceID } from "@/control-plane/schema"
@@ -22,6 +23,7 @@ export function InstanceMiddleware(workspaceID?: WorkspaceID): MiddlewareHandler
async fn() {
return Instance.provide({
directory,
init: await getBootstrapRunEffect(),
async fn() {
return next()
},

View File

@@ -2,12 +2,13 @@ import { Hono } from "hono"
import { describeRoute, validator } from "hono-openapi"
import { resolver } from "hono-openapi"
import { Instance } from "@/project/instance"
import { InstanceRuntime } from "@/project/instance-runtime"
import { InstanceStore } from "@/project/instance-store"
import { Project } from "@/project/project"
import z from "zod"
import { ProjectID } from "@/project/schema"
import { errors } from "../../error"
import { lazy } from "@/util/lazy"
import { getBootstrapRunEffect } from "@/effect/app-runtime"
import { jsonRequest, runRequest } from "./trace"
export const ProjectRoutes = lazy(() =>
@@ -81,7 +82,12 @@ export const ProjectRoutes = lazy(() =>
Project.Service.use((svc) => svc.initGit({ directory: dir, project: prev })),
)
if (next.id === prev.id && next.vcs === prev.vcs && next.worktree === prev.worktree) return c.json(next)
await InstanceRuntime.reloadInstance({ directory: dir, worktree: dir, project: next })
await InstanceStore.reloadInstance({
directory: dir,
worktree: dir,
project: next,
init: await getBootstrapRunEffect(),
})
return c.json(next)
},
)

View File

@@ -0,0 +1,229 @@
import { WorkspaceID } from "@/control-plane/schema"
import { SessionID } from "@/session/schema"
import { SessionMessage } from "@/v2/session-message"
import { SessionV2 } from "@/v2/session"
import { zod } from "@/util/effect-zod"
import { lazy } from "@/util/lazy"
import { Effect, Schema } from "effect"
import * as DateTime from "effect/DateTime"
import { Hono } from "hono"
import { describeRoute, resolver, validator } from "hono-openapi"
import { HTTPException } from "hono/http-exception"
import z from "zod"
import { errors } from "../../error"
import { jsonRequest } from "./trace"
// Default page sizes used when the client omits ?limit
// (the query validators below cap the maximum at 200).
const DefaultMessagesLimit = 50
const DefaultSessionsLimit = 50
// Opaque pagination cursor for session listing. Besides the position
// (session id + created time) and traversal metadata (order + direction),
// it freezes the filter set used on the first request so later pages
// cannot drift when the caller's query params change.
const SessionCursor = Schema.Struct({
  id: SessionID,
  time: Schema.Number,
  order: Schema.Union([Schema.Literal("asc"), Schema.Literal("desc")]),
  direction: Schema.Union([Schema.Literal("previous"), Schema.Literal("next")]),
  directory: Schema.String.pipe(Schema.optional),
  path: Schema.String.pipe(Schema.optional),
  workspaceID: WorkspaceID.pipe(Schema.optional),
  roots: Schema.Boolean.pipe(Schema.optional),
  start: Schema.Number.pipe(Schema.optional),
  search: Schema.String.pipe(Schema.optional),
})
// Value type of a decoded session cursor (derived from the schema above).
type SessionCursor = typeof SessionCursor.Type
// Response envelope for GET /session: a page of sessions plus the
// base64url cursors for the adjacent pages (absent on an empty page).
const SessionsResponse = Schema.Struct({
  items: Schema.Array(SessionV2.Info),
  cursor: Schema.Struct({
    previous: Schema.String.pipe(Schema.optional),
    next: Schema.String.pipe(Schema.optional),
  }),
}).annotate({ identifier: "V2SessionsResponse" })
// Opaque pagination cursor for message listing: position (message id +
// created epoch millis) plus the traversal order and direction.
const Cursor = Schema.Struct({
  id: SessionMessage.ID,
  time: Schema.Number,
  order: Schema.Union([Schema.Literal("asc"), Schema.Literal("desc")]),
  direction: Schema.Union([Schema.Literal("previous"), Schema.Literal("next")]),
})
// Response envelope for GET /session/:sessionID/message: a page of
// messages plus cursors for the adjacent pages.
const MessagesResponse = Schema.Struct({
  items: Schema.Array(SessionMessage.Message),
  cursor: Schema.Struct({
    previous: Schema.String.pipe(Schema.optional),
    next: Schema.String.pipe(Schema.optional),
  }),
}).annotate({ identifier: "V2SessionMessagesResponse" })
// Synchronous schema decoders; both throw on invalid input, which the
// route handlers translate into HTTP 400.
const decodeCursor = Schema.decodeUnknownSync(Cursor)
const decodeSessionCursor = Schema.decodeUnknownSync(SessionCursor)
// Codec for opaque session-list cursors: JSON payload <-> base64url string,
// validated against SessionCursor on decode.
const sessionCursor = {
  /**
   * Build the cursor for the page adjacent to `session` in `direction`.
   *
   * The filter fields are copied explicitly rather than spread: callers may
   * pass a previously decoded cursor (which also carries id/time/order/
   * direction), and spreading it after those keys would overwrite the new
   * pagination position with the old one, so paging would never advance.
   */
  encode(
    session: SessionV2.Info,
    order: "asc" | "desc",
    direction: "previous" | "next",
    filters: Pick<SessionCursor, "directory" | "path" | "workspaceID" | "roots" | "start" | "search">,
  ) {
    return Buffer.from(
      JSON.stringify({
        id: session.id,
        time: session.time.created,
        order,
        direction,
        directory: filters.directory,
        path: filters.path,
        workspaceID: filters.workspaceID,
        roots: filters.roots,
        start: filters.start,
        search: filters.search,
      }),
    ).toString("base64url")
  },
  // Throws (via decodeSessionCursor) on malformed input; callers map to 400.
  decode(input: string) {
    return decodeSessionCursor(JSON.parse(Buffer.from(input, "base64url").toString("utf8")))
  },
}
// Codec for opaque message-list cursors: JSON payload <-> base64url string.
const cursor = {
  // Anchor a cursor on `message`, recording the traversal order and direction.
  encode(message: SessionMessage.Message, order: "asc" | "desc", direction: "previous" | "next") {
    const payload = {
      id: message.id,
      time: DateTime.toEpochMillis(message.time.created),
      order,
      direction,
    }
    return Buffer.from(JSON.stringify(payload)).toString("base64url")
  },
  // Throws (via decodeCursor) on malformed input; callers map to HTTP 400.
  decode(input: string) {
    const json = Buffer.from(input, "base64url").toString("utf8")
    return decodeCursor(JSON.parse(json))
  },
}
/**
 * Hono routes for the v2 session API.
 *
 * Both endpoints paginate with opaque base64url cursors. Session-list
 * cursors additionally embed the filter set from the first request, so
 * follow-up pages ignore (possibly changed) query filters and stay
 * consistent with page one.
 */
export const V2Routes = lazy(() =>
  new Hono()
    .get(
      "/session",
      describeRoute({
        summary: "List v2 sessions",
        description:
          "Retrieve sessions in the requested order. Items keep that order across pages; use cursor.next or cursor.previous to move through the ordered list.",
        operationId: "v2.session.list",
        responses: {
          200: {
            description: "List of v2 sessions",
            content: {
              "application/json": {
                schema: resolver(zod(SessionsResponse)),
              },
            },
          },
          ...errors(400),
        },
      }),
      validator(
        "query",
        z.object({
          limit: z.coerce.number().int().min(1).max(200).optional(),
          cursor: z.string().optional(),
          order: z.enum(["asc", "desc"]).optional(),
          directory: z.string().optional(),
          path: z.string().optional(),
          workspace: WorkspaceID.zod.optional(),
          roots: z
            .enum(["true", "false"])
            .transform((value) => value === "true")
            .optional(),
          start: z.coerce.number().optional(),
          search: z.string().optional(),
        }),
      ),
      async (c) => {
        const query = c.req.valid("query")
        // Reject unparseable cursors with 400 before touching the service layer.
        const decoded = (() => {
          try {
            return query.cursor ? sessionCursor.decode(query.cursor) : undefined
          } catch {
            throw new HTTPException(400)
          }
        })()
        // Order captured in the cursor wins, so pages cannot flip direction.
        const order = decoded?.order ?? query.order ?? "desc"
        // Use ONLY the filter fields (from the cursor, or the query on page one).
        // Passing the whole decoded cursor would smuggle its id/time/order/direction
        // into sessionCursor.encode below and clobber the new pagination position.
        const filters = decoded
          ? {
              directory: decoded.directory,
              path: decoded.path,
              workspaceID: decoded.workspaceID,
              roots: decoded.roots,
              start: decoded.start,
              search: decoded.search,
            }
          : {
              directory: query.directory,
              path: query.path,
              workspaceID: query.workspace,
              roots: query.roots,
              start: query.start,
              search: query.search,
            }
        return jsonRequest("V2Routes.sessions", c, function* () {
          return yield* Effect.gen(function* () {
            const session = yield* SessionV2.Service
            const sessions = yield* session.list({
              limit: query.limit ?? DefaultSessionsLimit,
              order,
              directory: filters.directory,
              path: filters.path,
              workspaceID: filters.workspaceID,
              roots: filters.roots,
              start: filters.start,
              search: filters.search,
              cursor: decoded ? { id: decoded.id, time: decoded.time, direction: decoded.direction } : undefined,
            })
            // Anchor adjacent-page cursors on the first/last item of this page.
            const first = sessions[0]
            const last = sessions.at(-1)
            return {
              items: sessions,
              cursor: {
                previous: first ? sessionCursor.encode(first, order, "previous", filters) : undefined,
                next: last ? sessionCursor.encode(last, order, "next", filters) : undefined,
              },
            }
          }).pipe(Effect.provide(SessionV2.defaultLayer))
        })
      },
    )
    .get(
      "/session/:sessionID/message",
      describeRoute({
        summary: "Get v2 session messages",
        description: "Retrieve projected v2 messages for a session directly from the message database.",
        operationId: "v2.session.messages",
        responses: {
          200: {
            description: "List of v2 session messages",
            content: {
              "application/json": {
                schema: resolver(zod(MessagesResponse)),
              },
            },
          },
          ...errors(400, 404),
        },
      }),
      validator("param", z.object({ sessionID: SessionID.zod })),
      validator(
        "query",
        z.object({
          limit: z.coerce.number().int().min(1).max(200).optional(),
          cursor: z.string().optional(),
          order: z.enum(["asc", "desc"]).optional(),
        }),
      ),
      async (c) => {
        const sessionID = c.req.valid("param").sessionID
        const query = c.req.valid("query")
        // Reject unparseable cursors with 400 before touching the service layer.
        const decoded = (() => {
          try {
            return query.cursor ? cursor.decode(query.cursor) : undefined
          } catch {
            throw new HTTPException(400)
          }
        })()
        const order = decoded?.order ?? query.order ?? "desc"
        return jsonRequest("V2Routes.messages", c, function* () {
          return yield* Effect.gen(function* () {
            const session = yield* SessionV2.Service
            const messages = yield* session.messages({
              sessionID,
              limit: query.limit ?? DefaultMessagesLimit,
              order,
              cursor: decoded ? { id: decoded.id, time: decoded.time, direction: decoded.direction } : undefined,
            })
            // Anchor adjacent-page cursors on the first/last item of this page.
            const first = messages[0]
            const last = messages.at(-1)
            return {
              items: messages,
              cursor: {
                previous: first ? cursor.encode(first, order, "previous") : undefined,
                next: last ? cursor.encode(last, order, "next") : undefined,
              },
            }
          }).pipe(Effect.provide(SessionV2.defaultLayer))
        })
      },
    ),
)

View File

@@ -5,7 +5,7 @@ import { WorkspaceID } from "@/control-plane/schema"
import { WorkspaceContext } from "@/control-plane/workspace-context"
import { Workspace } from "@/control-plane/workspace"
import { Flag } from "@opencode-ai/core/flag/flag"
import { AppRuntime } from "@/effect/app-runtime"
import { getBootstrapRunEffect, AppRuntime } from "@/effect/app-runtime"
import { Instance } from "@/project/instance"
import { Session } from "@/session/session"
import { SessionID } from "@/session/schema"
@@ -94,11 +94,13 @@ export function WorkspaceRouterMiddleware(upgrade: UpgradeWebSocket): Middleware
const target = await adapter.target(workspace)
if (target.type === "local") {
const init = await getBootstrapRunEffect()
return WorkspaceContext.provide({
workspaceID: WorkspaceID.make(workspaceID),
fn: () =>
Instance.provide({
directory: target.directory,
init,
async fn() {
return next()
},

View File

@@ -14,10 +14,13 @@ import { Config } from "@/config/config"
import { NotFoundError } from "@/storage/storage"
import { ModelID, ProviderID } from "@/provider/schema"
import { Effect, Layer, Context, Schema } from "effect"
import * as DateTime from "effect/DateTime"
import { InstanceState } from "@/effect/instance-state"
import { isOverflow as overflow, usable } from "./overflow"
import { makeRuntime } from "@/effect/run-service"
import { fn } from "@/util/fn"
import { EventV2 } from "@/v2/event"
import { SessionEvent } from "@/v2/session-event"
const log = Log.create({ service: "session.compaction" })
@@ -556,7 +559,21 @@ export const layer: Layer.Layer<
}
if (processor.message.error) return "stop"
if (result === "continue") yield* bus.publish(Event.Compacted, { sessionID: input.sessionID })
if (result === "continue") {
const summary = summaryText(
(yield* session.messages({ sessionID: input.sessionID })).find((item) => item.info.id === msg.id) ?? {
info: msg,
parts: [],
},
)
EventV2.run(SessionEvent.Compaction.Ended.Sync, {
sessionID: input.sessionID,
timestamp: DateTime.makeUnsafe(Date.now()),
text: summary ?? "",
include: selected.tail_start_id,
})
yield* bus.publish(Event.Compacted, { sessionID: input.sessionID })
}
return result
})
@@ -583,6 +600,11 @@ export const layer: Layer.Layer<
auto: input.auto,
overflow: input.overflow,
})
EventV2.run(SessionEvent.Compaction.Started.Sync, {
sessionID: input.sessionID,
timestamp: DateTime.makeUnsafe(Date.now()),
reason: input.auto ? "auto" : "manual",
})
})
return Service.of({

View File

@@ -20,6 +20,9 @@ import { Question } from "@/question"
import { errorMessage } from "@/util/error"
import * as Log from "@opencode-ai/core/util/log"
import { isRecord } from "@/util/record"
import { EventV2 } from "@/v2/event"
import { SessionEvent } from "@/v2/session-event"
import * as DateTime from "effect/DateTime"
const DOOM_LOOP_THRESHOLD = 3
const log = Log.create({ service: "session.processor" })
@@ -221,6 +224,12 @@ export const layer: Layer.Layer<
case "reasoning-start":
if (value.id in ctx.reasoningMap) return
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
EventV2.run(SessionEvent.Reasoning.Started.Sync, {
sessionID: ctx.sessionID,
reasoningID: value.id,
timestamp: DateTime.makeUnsafe(Date.now()),
})
ctx.reasoningMap[value.id] = {
id: PartID.ascending(),
messageID: ctx.assistantMessage.id,
@@ -235,6 +244,13 @@ export const layer: Layer.Layer<
case "reasoning-delta":
if (!(value.id in ctx.reasoningMap)) return
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
EventV2.run(SessionEvent.Reasoning.Delta.Sync, {
sessionID: ctx.sessionID,
reasoningID: value.id,
delta: value.text,
timestamp: DateTime.makeUnsafe(Date.now()),
})
ctx.reasoningMap[value.id].text += value.text
if (value.providerMetadata) ctx.reasoningMap[value.id].metadata = value.providerMetadata
yield* session.updatePartDelta({
@@ -248,6 +264,13 @@ export const layer: Layer.Layer<
case "reasoning-end":
if (!(value.id in ctx.reasoningMap)) return
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
EventV2.run(SessionEvent.Reasoning.Ended.Sync, {
sessionID: ctx.sessionID,
reasoningID: value.id,
text: ctx.reasoningMap[value.id].text,
timestamp: DateTime.makeUnsafe(Date.now()),
})
// oxlint-disable-next-line no-self-assign -- reactivity trigger
ctx.reasoningMap[value.id].text = ctx.reasoningMap[value.id].text
ctx.reasoningMap[value.id].time = { ...ctx.reasoningMap[value.id].time, end: Date.now() }
@@ -260,6 +283,13 @@ export const layer: Layer.Layer<
if (ctx.assistantMessage.summary) {
throw new Error(`Tool call not allowed while generating summary: ${value.toolName}`)
}
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
EventV2.run(SessionEvent.Tool.Input.Started.Sync, {
sessionID: ctx.sessionID,
callID: value.id,
name: value.toolName,
timestamp: DateTime.makeUnsafe(Date.now()),
})
const part = yield* session.updatePart({
id: ctx.toolcalls[value.id]?.partID ?? PartID.ascending(),
messageID: ctx.assistantMessage.id,
@@ -281,13 +311,34 @@ export const layer: Layer.Layer<
case "tool-input-delta":
return
case "tool-input-end":
case "tool-input-end": {
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
EventV2.run(SessionEvent.Tool.Input.Ended.Sync, {
sessionID: ctx.sessionID,
callID: value.id,
text: "",
timestamp: DateTime.makeUnsafe(Date.now()),
})
return
}
case "tool-call": {
if (ctx.assistantMessage.summary) {
throw new Error(`Tool call not allowed while generating summary: ${value.toolName}`)
}
const toolCall = yield* readToolCall(value.toolCallId)
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
EventV2.run(SessionEvent.Tool.Called.Sync, {
sessionID: ctx.sessionID,
callID: value.toolCallId,
tool: value.toolName,
input: value.input,
provider: {
executed: toolCall?.part.metadata?.providerExecuted === true,
...(value.providerMetadata ? { metadata: value.providerMetadata } : {}),
},
timestamp: DateTime.makeUnsafe(Date.now()),
})
yield* updateToolCall(value.toolCallId, (match) => ({
...match,
tool: value.toolName,
@@ -331,11 +382,48 @@ export const layer: Layer.Layer<
}
case "tool-result": {
const toolCall = yield* readToolCall(value.toolCallId)
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
EventV2.run(SessionEvent.Tool.Success.Sync, {
sessionID: ctx.sessionID,
callID: value.toolCallId,
structured: value.output.metadata,
content: [
{
type: "text",
text: value.output.output,
},
...(value.output.attachments?.map((item: MessageV2.FilePart) => ({
type: "file",
uri: item.url,
mime: item.mime,
name: item.filename,
})) ?? []),
],
provider: {
executed: toolCall?.part.metadata?.providerExecuted === true,
},
timestamp: DateTime.makeUnsafe(Date.now()),
})
yield* completeToolCall(value.toolCallId, value.output)
return
}
case "tool-error": {
const toolCall = yield* readToolCall(value.toolCallId)
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
EventV2.run(SessionEvent.Tool.Error.Sync, {
sessionID: ctx.sessionID,
callID: value.toolCallId,
error: {
type: "unknown",
message: errorMessage(value.error),
},
provider: {
executed: toolCall?.part.metadata?.providerExecuted === true,
},
timestamp: DateTime.makeUnsafe(Date.now()),
})
yield* failToolCall(value.toolCallId, value.error)
return
}
@@ -345,6 +433,20 @@ export const layer: Layer.Layer<
case "start-step":
if (!ctx.snapshot) ctx.snapshot = yield* snapshot.track()
if (!ctx.assistantMessage.summary) {
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
EventV2.run(SessionEvent.Step.Started.Sync, {
sessionID: ctx.sessionID,
agent: input.assistantMessage.agent,
model: {
id: ctx.model.id,
providerID: ctx.model.providerID,
variant: input.assistantMessage.variant,
},
snapshot: ctx.snapshot,
timestamp: DateTime.makeUnsafe(Date.now()),
})
}
yield* session.updatePart({
id: PartID.ascending(),
messageID: ctx.assistantMessage.id,
@@ -355,18 +457,30 @@ export const layer: Layer.Layer<
return
case "finish-step": {
const completedSnapshot = yield* snapshot.track()
const usage = Session.getUsage({
model: ctx.model,
usage: value.usage,
metadata: value.providerMetadata,
})
if (!ctx.assistantMessage.summary) {
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
EventV2.run(SessionEvent.Step.Ended.Sync, {
sessionID: ctx.sessionID,
finish: value.finishReason,
cost: usage.cost,
tokens: usage.tokens,
snapshot: completedSnapshot,
timestamp: DateTime.makeUnsafe(Date.now()),
})
}
ctx.assistantMessage.finish = value.finishReason
ctx.assistantMessage.cost += usage.cost
ctx.assistantMessage.tokens = usage.tokens
yield* session.updatePart({
id: PartID.ascending(),
reason: value.finishReason,
snapshot: yield* snapshot.track(),
snapshot: completedSnapshot,
messageID: ctx.assistantMessage.id,
sessionID: ctx.assistantMessage.sessionID,
type: "step-finish",
@@ -404,6 +518,13 @@ export const layer: Layer.Layer<
}
case "text-start":
if (!ctx.assistantMessage.summary) {
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
EventV2.run(SessionEvent.Text.Started.Sync, {
sessionID: ctx.sessionID,
timestamp: DateTime.makeUnsafe(Date.now()),
})
}
ctx.currentText = {
id: PartID.ascending(),
messageID: ctx.assistantMessage.id,
@@ -418,6 +539,13 @@ export const layer: Layer.Layer<
case "text-delta":
if (!ctx.currentText) return
if (ctx.assistantMessage.summary) {
EventV2.run(SessionEvent.Compaction.Delta.Sync, {
sessionID: ctx.sessionID,
text: value.text,
timestamp: DateTime.makeUnsafe(Date.now()),
})
}
ctx.currentText.text += value.text
if (value.providerMetadata) ctx.currentText.metadata = value.providerMetadata
yield* session.updatePartDelta({
@@ -442,6 +570,14 @@ export const layer: Layer.Layer<
},
{ text: ctx.currentText.text },
)).text
if (!ctx.assistantMessage.summary) {
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
EventV2.run(SessionEvent.Text.Ended.Sync, {
sessionID: ctx.sessionID,
text: ctx.currentText.text,
timestamp: DateTime.makeUnsafe(Date.now()),
})
}
{
const end = Date.now()
ctx.currentText.time = { start: ctx.currentText.time?.start ?? end, end }
@@ -568,13 +704,24 @@ export const layer: Layer.Layer<
Effect.retry(
SessionRetry.policy({
parse,
set: (info) =>
status.set(ctx.sessionID, {
set: (info) => {
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
EventV2.run(SessionEvent.Retried.Sync, {
sessionID: ctx.sessionID,
attempt: info.attempt,
error: {
message: info.message,
isRetryable: true,
},
timestamp: DateTime.makeUnsafe(Date.now()),
})
return status.set(ctx.sessionID, {
type: "retry",
attempt: info.attempt,
message: info.message,
next: info.next,
}),
})
},
}),
),
Effect.catch(halt),

View File

@@ -0,0 +1,206 @@
import { and, desc, eq } from "@/storage/db"
import type { Database } from "@/storage/db"
import { SessionMessage } from "@/v2/session-message"
import { SessionMessageUpdater } from "@/v2/session-message-updater"
import { SessionEvent } from "@/v2/session-event"
import * as DateTime from "effect/DateTime"
import { SyncEvent } from "@/sync"
import { SessionMessageTable, SessionTable } from "./session.sql"
import type { SessionID } from "./schema"
import { Schema } from "effect"
// Synchronous decoder from a raw row payload ({ ...row.data, id, type }) back
// into a typed SessionMessage.Message; throws if the stored JSON no longer
// matches the schema.
const decodeMessage = Schema.decodeUnknownSync(SessionMessage.Message)
// JSON shape of the session_message.data column (the message minus its key
// columns), derived from the drizzle insert type so it tracks the table.
type SessionMessageData = NonNullable<(typeof SessionMessageTable.$inferInsert)["data"]>
/**
 * Recursively replaces every effect DateTime inside `value` with its
 * epoch-millisecond number so the structure can be persisted as plain JSON.
 * Arrays and plain objects are walked; every other value passes through
 * unchanged.
 */
function encodeDateTimes(value: unknown): unknown {
  if (DateTime.isDateTime(value)) return DateTime.toEpochMillis(value)
  if (Array.isArray(value)) {
    const items: unknown[] = []
    for (const entry of value) items.push(encodeDateTimes(entry))
    return items
  }
  if (value !== null && typeof value === "object") {
    const out: Record<string, unknown> = {}
    for (const [key, entry] of Object.entries(value)) out[key] = encodeDateTimes(entry)
    return out
  }
  return value
}
/**
 * Encodes DateTimes to epoch millis, then narrows the result to the JSON
 * type of the session_message.data column for storage.
 */
function encodeMessageData(value: unknown): SessionMessageData {
  const encoded = encodeDateTimes(value)
  return encoded as SessionMessageData
}
/**
 * Builds a SessionMessageUpdater adapter backed by the session_message SQLite
 * table, scoped to a single session.
 *
 * Reads decode the stored JSON `data` column (plus the id/type key columns)
 * back into typed SessionMessage values; writes strip id/type out of the
 * message and persist the remainder as JSON via encodeMessageData.
 *
 * The original inlined the identical select pipeline three times and the
 * identical update pipeline three times; both are factored into the private
 * `messages` / `write` helpers below with behavior unchanged.
 */
function sqlite(db: Database.TxOrDb, sessionID: SessionID): SessionMessageUpdater.Adapter<void> {
  // Newest-first decoded messages of one type for this session.
  const messages = (type: SessionMessage.Type) =>
    db
      .select()
      .from(SessionMessageTable)
      .where(and(eq(SessionMessageTable.session_id, sessionID), eq(SessionMessageTable.type, type)))
      .orderBy(desc(SessionMessageTable.id))
      .all()
      .map((row) => decodeMessage({ ...row.data, id: row.id, type: row.type }))
  // Shared write path: everything except the key columns goes into `data`.
  const write = (message: SessionMessage.Assistant | SessionMessage.Compaction | SessionMessage.Shell) => {
    const { id, type, ...data } = message
    db.update(SessionMessageTable)
      .set({ data: encodeMessageData(data) })
      .where(
        and(
          eq(SessionMessageTable.id, id),
          eq(SessionMessageTable.session_id, sessionID),
          eq(SessionMessageTable.type, type),
        ),
      )
      .run()
  }
  return {
    getCurrentAssistant() {
      // Latest assistant message that has not completed yet.
      return messages("assistant").find(
        (message): message is SessionMessage.Assistant => message.type === "assistant" && !message.time.completed,
      )
    },
    getCurrentCompaction() {
      // Latest compaction message for the session.
      return messages("compaction").find(
        (message): message is SessionMessage.Compaction => message.type === "compaction",
      )
    },
    getCurrentShell(callID) {
      // Latest shell message matching the given tool call ID.
      return messages("shell").find(
        (message): message is SessionMessage.Shell => message.type === "shell" && message.callID === callID,
      )
    },
    updateAssistant: write,
    updateCompaction: write,
    updateShell: write,
    appendMessage(message) {
      const { id, type, ...data } = message
      db.insert(SessionMessageTable)
        .values([
          {
            id,
            session_id: sessionID,
            type,
            time_created: DateTime.toEpochMillis(message.time.created),
            data: encodeMessageData(data),
          },
        ])
        .run()
    },
    finish() {},
  }
}
/**
 * Applies a v2 session event to the session_message projection, using the
 * SQLite adapter scoped to the event's session.
 */
function update(db: Database.TxOrDb, event: SessionEvent.Event) {
  const adapter = sqlite(db, event.data.sessionID)
  SessionMessageUpdater.update(adapter, event)
}
// Projectors that fold v2 session events into SQLite. The two session-level
// events (agent/model switch) additionally update SessionTable columns; every
// other event is forwarded into the session_message projection via `update`,
// keyed by the originating event id.
export default [
  // Agent switch: persist the new agent on the session row, then record the
  // event in the message stream.
  SyncEvent.project(SessionEvent.AgentSwitched.Sync, (db, data, event) => {
    db.update(SessionTable)
      .set({
        agent: data.agent,
        time_updated: DateTime.toEpochMillis(data.timestamp),
      })
      .where(eq(SessionTable.id, data.sessionID))
      .run()
    update(db, { id: SessionMessage.ID.make(event.id), type: "session.next.agent.switched", data })
  }),
  // Model switch: persist the selected model (id/provider/variant) on the
  // session row, then record the event in the message stream.
  SyncEvent.project(SessionEvent.ModelSwitched.Sync, (db, data, event) => {
    db.update(SessionTable)
      .set({
        model: {
          id: data.id,
          providerID: data.providerID,
          variant: data.variant,
        },
        time_updated: DateTime.toEpochMillis(data.timestamp),
      })
      .where(eq(SessionTable.id, data.sessionID))
      .run()
    update(db, { id: SessionMessage.ID.make(event.id), type: "session.next.model.switched", data })
  }),
  // Everything below only feeds the session_message projection.
  SyncEvent.project(SessionEvent.Prompted.Sync, (db, data, event) => {
    update(db, { id: SessionMessage.ID.make(event.id), type: "session.next.prompted", data })
  }),
  SyncEvent.project(SessionEvent.Synthetic.Sync, (db, data, event) => {
    update(db, { id: SessionMessage.ID.make(event.id), type: "session.next.synthetic", data })
  }),
  SyncEvent.project(SessionEvent.Shell.Started.Sync, (db, data, event) => {
    update(db, { id: SessionMessage.ID.make(event.id), type: "session.next.shell.started", data })
  }),
  SyncEvent.project(SessionEvent.Shell.Ended.Sync, (db, data, event) => {
    update(db, { id: SessionMessage.ID.make(event.id), type: "session.next.shell.ended", data })
  }),
  SyncEvent.project(SessionEvent.Step.Started.Sync, (db, data, event) => {
    update(db, { id: SessionMessage.ID.make(event.id), type: "session.next.step.started", data })
  }),
  SyncEvent.project(SessionEvent.Step.Ended.Sync, (db, data, event) => {
    update(db, { id: SessionMessage.ID.make(event.id), type: "session.next.step.ended", data })
  }),
  SyncEvent.project(SessionEvent.Text.Started.Sync, (db, data, event) => {
    update(db, { id: SessionMessage.ID.make(event.id), type: "session.next.text.started", data })
  }),
  // Text/tool-input/reasoning delta events are intentionally no-ops here.
  // NOTE(review): this assumes the matching Ended event carries the fully
  // accumulated payload, making per-delta writes redundant — confirm.
  SyncEvent.project(SessionEvent.Text.Delta.Sync, () => {}),
  SyncEvent.project(SessionEvent.Text.Ended.Sync, (db, data, event) => {
    update(db, { id: SessionMessage.ID.make(event.id), type: "session.next.text.ended", data })
  }),
  SyncEvent.project(SessionEvent.Tool.Input.Started.Sync, (db, data, event) => {
    update(db, { id: SessionMessage.ID.make(event.id), type: "session.next.tool.input.started", data })
  }),
  SyncEvent.project(SessionEvent.Tool.Input.Delta.Sync, () => {}),
  SyncEvent.project(SessionEvent.Tool.Input.Ended.Sync, (db, data, event) => {
    update(db, { id: SessionMessage.ID.make(event.id), type: "session.next.tool.input.ended", data })
  }),
  SyncEvent.project(SessionEvent.Tool.Called.Sync, (db, data, event) => {
    update(db, { id: SessionMessage.ID.make(event.id), type: "session.next.tool.called", data })
  }),
  SyncEvent.project(SessionEvent.Tool.Success.Sync, (db, data, event) => {
    update(db, { id: SessionMessage.ID.make(event.id), type: "session.next.tool.success", data })
  }),
  SyncEvent.project(SessionEvent.Tool.Error.Sync, (db, data, event) => {
    update(db, { id: SessionMessage.ID.make(event.id), type: "session.next.tool.error", data })
  }),
  SyncEvent.project(SessionEvent.Reasoning.Started.Sync, (db, data, event) => {
    update(db, { id: SessionMessage.ID.make(event.id), type: "session.next.reasoning.started", data })
  }),
  SyncEvent.project(SessionEvent.Reasoning.Delta.Sync, () => {}),
  SyncEvent.project(SessionEvent.Reasoning.Ended.Sync, (db, data, event) => {
    update(db, { id: SessionMessage.ID.make(event.id), type: "session.next.reasoning.ended", data })
  }),
  SyncEvent.project(SessionEvent.Retried.Sync, (db, data, event) => {
    update(db, { id: SessionMessage.ID.make(event.id), type: "session.next.retried", data })
  }),
  SyncEvent.project(SessionEvent.Compaction.Started.Sync, (db, data, event) => {
    update(db, { id: SessionMessage.ID.make(event.id), type: "session.next.compaction.started", data })
  }),
  // Unlike the other delta events, compaction deltas ARE projected.
  // NOTE(review): presumably the compaction summary is rebuilt incrementally
  // from deltas by the updater — confirm against SessionMessageUpdater.
  SyncEvent.project(SessionEvent.Compaction.Delta.Sync, (db, data, event) => {
    update(db, { id: SessionMessage.ID.make(event.id), type: "session.next.compaction.delta", data })
  }),
  SyncEvent.project(SessionEvent.Compaction.Ended.Sync, (db, data, event) => {
    update(db, { id: SessionMessage.ID.make(event.id), type: "session.next.compaction.ended", data })
  }),
]

View File

@@ -5,7 +5,8 @@ import { SyncEvent } from "@/sync"
import * as Session from "./session"
import { MessageV2 } from "./message-v2"
import { SessionTable, MessageTable, PartTable } from "./session.sql"
import * as Log from "@opencode-ai/core/util/log"
import { Log } from "@opencode-ai/core/util/log"
import nextProjectors from "./projectors-next"
const log = Log.create({ service: "session.projector" })
@@ -136,4 +137,6 @@ export default [
log.warn("ignored late part update", { partID: id, messageID, sessionID })
}
}),
...nextProjectors,
]

View File

@@ -41,6 +41,7 @@ import { Permission } from "@/permission"
import { SessionStatus } from "./status"
import { LLM } from "./llm"
import { Shell } from "@/shell/shell"
import { ShellToolID } from "@/tool/shell/id"
import { AppFileSystem } from "@opencode-ai/core/filesystem"
import { Truncate } from "@/tool/truncate"
import { decodeDataUrl } from "@/util/data-url"
@@ -53,6 +54,13 @@ import { InstanceState } from "@/effect/instance-state"
import { TaskTool, type TaskPromptOps } from "@/tool/task"
import { SessionRunState } from "./run-state"
import { EffectBridge } from "@/effect/bridge"
import { EventV2 } from "@/v2/event"
import { SessionEvent } from "@/v2/session-event"
import { AgentAttachment, FileAttachment, Source } from "@/v2/session-prompt"
import * as DateTime from "effect/DateTime"
import { eq } from "@/storage/db"
import * as Database from "@/storage/db"
import { SessionTable } from "./session.sql"
// @ts-ignore
globalThis.AI_SDK_LOG_WARNINGS = false
@@ -784,20 +792,28 @@ NOTE: At any point in time through this workflow you should feel free to ask the
providerID: model.providerID,
}
yield* sessions.updateMessage(msg)
const callID = ulid()
const started = Date.now()
const part: MessageV2.ToolPart = {
type: "tool",
id: PartID.ascending(),
messageID: msg.id,
sessionID: input.sessionID,
tool: "bash",
callID: ulid(),
tool: ShellToolID.id,
callID,
state: {
status: "running",
time: { start: Date.now() },
time: { start: started },
input: { command: input.command },
},
}
yield* sessions.updatePart(part)
EventV2.run(SessionEvent.Shell.Started.Sync, {
sessionID: input.sessionID,
timestamp: DateTime.makeUnsafe(started),
callID,
command: input.command,
})
return { msg, part, cwd: ctx.directory }
}).pipe(Effect.ensuring(markReady))
@@ -812,14 +828,21 @@ NOTE: At any point in time through this workflow you should feel free to ask the
if (aborted) {
output += "\n\n" + ["<metadata>", "User aborted the command", "</metadata>"].join("\n")
}
const completed = Date.now()
EventV2.run(SessionEvent.Shell.Ended.Sync, {
sessionID: input.sessionID,
timestamp: DateTime.makeUnsafe(completed),
callID: part.callID,
output,
})
if (!msg.time.completed) {
msg.time.completed = Date.now()
msg.time.completed = completed
yield* sessions.updateMessage(msg)
}
if (part.state.status === "running") {
part.state = {
status: "completed",
time: { ...part.state.time, end: Date.now() },
time: { ...part.state.time, end: completed },
input: part.state.input,
title: "",
metadata: { output, description: "" },
@@ -933,6 +956,34 @@ NOTE: At any point in time through this workflow you should feel free to ask the
format: input.format,
}
const current = Database.use((db) =>
db
.select({ agent: SessionTable.agent, model: SessionTable.model })
.from(SessionTable)
.where(eq(SessionTable.id, input.sessionID))
.get(),
)
if (current?.agent !== info.agent) {
EventV2.run(SessionEvent.AgentSwitched.Sync, {
sessionID: input.sessionID,
timestamp: DateTime.makeUnsafe(info.time.created),
agent: info.agent,
})
}
if (
current?.model?.providerID !== info.model.providerID ||
current.model.id !== info.model.modelID ||
current.model.variant !== info.model.variant
) {
EventV2.run(SessionEvent.ModelSwitched.Sync, {
sessionID: input.sessionID,
timestamp: DateTime.makeUnsafe(info.time.created),
id: info.model.modelID,
providerID: info.model.providerID,
variant: info.model.variant,
})
}
yield* Effect.addFinalizer(() => instruction.clear(info.id))
type Draft<T> = T extends MessageV2.Part ? Omit<T, "id"> & { id?: string } : never
@@ -1249,6 +1300,69 @@ NOTE: At any point in time through this workflow you should feel free to ask the
yield* sessions.updateMessage(info)
for (const part of parts) yield* sessions.updatePart(part)
const nextPrompt = parts.reduce(
(result, part) => {
if (part.type === "text") {
if (part.synthetic) result.synthetic.push(part.text)
else result.text.push(part.text)
}
if (part.type === "file") {
result.files.push(
new FileAttachment({
uri: part.url,
mime: part.mime,
name: part.filename,
source: part.source
? new Source({
start: part.source.text.start,
end: part.source.text.end,
text: part.source.text.value,
})
: undefined,
}),
)
}
if (part.type === "agent") {
result.agents.push(
new AgentAttachment({
name: part.name,
source: part.source
? new Source({
start: part.source.start,
end: part.source.end,
text: part.source.value,
})
: undefined,
}),
)
}
return result
},
{
text: [] as string[],
files: [] as FileAttachment[],
agents: [] as AgentAttachment[],
synthetic: [] as string[],
},
)
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
EventV2.run(SessionEvent.Prompted.Sync, {
sessionID: input.sessionID,
timestamp: DateTime.makeUnsafe(info.time.created),
prompt: {
text: nextPrompt.text.join("\n"),
files: nextPrompt.files,
agents: nextPrompt.agents,
},
})
for (const text of nextPrompt.synthetic) {
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
EventV2.run(SessionEvent.Synthetic.Sync, {
sessionID: input.sessionID,
timestamp: DateTime.makeUnsafe(info.time.created),
text,
})
}
return { info, parts }
}, Effect.scoped)

View File

@@ -1,7 +1,7 @@
import { sqliteTable, text, integer, index, primaryKey } from "drizzle-orm/sqlite-core"
import { ProjectTable } from "../project/project.sql"
import type { MessageV2 } from "./message-v2"
import type { SessionEntry } from "../v2/session-entry"
import type { SessionMessage } from "../v2/session-message"
import type { Snapshot } from "../snapshot"
import type { Permission } from "../permission"
import type { ProjectID } from "../project/schema"
@@ -11,6 +11,7 @@ import { Timestamps } from "../storage/schema.sql"
type PartData = Omit<MessageV2.Part, "id" | "sessionID" | "messageID">
type InfoData = Omit<MessageV2.Info, "id" | "sessionID">
type SessionMessageData = Omit<(typeof SessionMessage.Message)["Encoded"], "type" | "id">
export const SessionTable = sqliteTable(
"session",
@@ -34,6 +35,12 @@ export const SessionTable = sqliteTable(
summary_diffs: text({ mode: "json" }).$type<Snapshot.FileDiff[]>(),
revert: text({ mode: "json" }).$type<{ messageID: MessageID; partID?: PartID; snapshot?: string; diff?: string }>(),
permission: text({ mode: "json" }).$type<Permission.Ruleset>(),
agent: text(),
model: text({ mode: "json" }).$type<{
id: string
providerID: string
variant?: string
}>(),
...Timestamps,
time_compacting: integer(),
time_archived: integer(),
@@ -96,22 +103,22 @@ export const TodoTable = sqliteTable(
],
)
export const SessionEntryTable = sqliteTable(
"session_entry",
export const SessionMessageTable = sqliteTable(
"session_message",
{
id: text().$type<SessionEntry.ID>().primaryKey(),
id: text().$type<SessionMessage.ID>().primaryKey(),
session_id: text()
.$type<SessionID>()
.notNull()
.references(() => SessionTable.id, { onDelete: "cascade" }),
type: text().$type<SessionEntry.Type>().notNull(),
type: text().$type<SessionMessage.Type>().notNull(),
...Timestamps,
data: text({ mode: "json" }).notNull().$type<Omit<SessionEntry.Entry, "type" | "id">>(),
data: text({ mode: "json" }).notNull().$type<SessionMessageData>(),
},
(table) => [
index("session_entry_session_idx").on(table.session_id),
index("session_entry_session_type_idx").on(table.session_id, table.type),
index("session_entry_time_created_idx").on(table.time_created),
index("session_message_session_idx").on(table.session_id),
index("session_message_session_type_idx").on(table.session_id, table.type),
index("session_message_time_created_idx").on(table.time_created),
],
)

View File

@@ -32,6 +32,7 @@ import { Snapshot } from "@/snapshot"
import { ProjectID } from "../project/schema"
import { WorkspaceID } from "../control-plane/schema"
import { SessionID, MessageID, PartID } from "./schema"
import { ModelID, ProviderID } from "@/provider/schema"
import type { Provider } from "@/provider/provider"
import { Permission } from "@/permission"
@@ -78,6 +79,10 @@ export function fromRow(row: SessionRow): Info {
path: row.path ?? undefined,
parentID: row.parent_id ?? undefined,
title: row.title,
agent: row.agent ?? undefined,
model: row.model
? { id: ModelID.make(row.model.id), providerID: ProviderID.make(row.model.providerID), variant: row.model.variant }
: undefined,
version: row.version,
summary,
share,
@@ -102,6 +107,8 @@ export function toRow(info: Info) {
directory: info.directory,
path: info.path,
title: info.title,
agent: info.agent,
model: info.model,
version: info.version,
share_url: info.share?.url,
summary_additions: info.summary?.additions,
@@ -160,6 +167,12 @@ const Revert = Schema.Struct({
diff: optionalOmitUndefined(Schema.String),
})
const Model = Schema.Struct({
id: ModelID,
providerID: ProviderID,
variant: optionalOmitUndefined(Schema.String),
})
export const Info = Schema.Struct({
id: SessionID,
slug: Schema.String,
@@ -171,6 +184,8 @@ export const Info = Schema.Struct({
summary: optionalOmitUndefined(Summary),
share: optionalOmitUndefined(Share),
title: Schema.String,
agent: optionalOmitUndefined(Schema.String),
model: optionalOmitUndefined(Model),
version: Schema.String,
time: Time,
permission: optionalOmitUndefined(Permission.Ruleset),
@@ -201,6 +216,8 @@ export const CreateInput = Schema.optional(
Schema.Struct({
parentID: Schema.optional(SessionID),
title: Schema.optional(Schema.String),
agent: Schema.optional(Schema.String),
model: Schema.optional(Model),
permission: Schema.optional(Permission.Ruleset),
workspaceID: Schema.optional(WorkspaceID),
}),
@@ -272,6 +289,8 @@ const UpdatedInfo = Schema.Struct({
summary: Schema.optional(Schema.NullOr(Summary)),
share: Schema.optional(UpdatedShare),
title: Schema.optional(Schema.NullOr(Schema.String)),
agent: Schema.optional(Schema.NullOr(Schema.String)),
model: Schema.optional(Schema.NullOr(Model)),
version: Schema.optional(Schema.NullOr(Schema.String)),
time: Schema.optional(UpdatedTime),
permission: Schema.optional(Schema.NullOr(Permission.Ruleset)),
@@ -316,7 +335,8 @@ export const Event = {
sessionID: Schema.optional(SessionID),
// Reuses MessageV2.Assistant.fields.error (already Schema.optional) so
// the derived zod keeps the same discriminated-union shape on the bus.
error: MessageV2.Assistant.fields.error,
// Schema.suspend defers access to break circular init in compiled binaries.
error: Schema.suspend(() => MessageV2.Assistant.fields.error),
}),
),
}
@@ -404,6 +424,8 @@ export interface Interface {
readonly create: (input?: {
parentID?: SessionID
title?: string
agent?: string
model?: Schema.Schema.Type<typeof Model>
permission?: Permission.Ruleset
workspaceID?: WorkspaceID
}) => Effect.Effect<Info>
@@ -464,6 +486,8 @@ export const layer: Layer.Layer<Service, never, Bus.Service | Storage.Service |
const createNext = Effect.fn("Session.createNext")(function* (input: {
id?: SessionID
title?: string
agent?: string
model?: Schema.Schema.Type<typeof Model>
parentID?: SessionID
workspaceID?: WorkspaceID
directory: string
@@ -481,6 +505,8 @@ export const layer: Layer.Layer<Service, never, Bus.Service | Storage.Service |
workspaceID: input.workspaceID,
parentID: input.parentID,
title: input.title ?? createDefaultTitle(!!input.parentID),
agent: input.agent,
model: input.model,
permission: input.permission,
time: {
created: Date.now(),
@@ -591,6 +617,8 @@ export const layer: Layer.Layer<Service, never, Bus.Service | Storage.Service |
const create = Effect.fn("Session.create")(function* (input?: {
parentID?: SessionID
title?: string
agent?: string
model?: Schema.Schema.Type<typeof Model>
permission?: Permission.Ruleset
workspaceID?: WorkspaceID
}) {
@@ -601,6 +629,8 @@ export const layer: Layer.Layer<Service, never, Bus.Service | Storage.Service |
directory: ctx.directory,
path: sessionPath(ctx.worktree, ctx.directory),
title: input?.title,
agent: input?.agent,
model: input?.model,
permission: input?.permission,
workspaceID: input?.workspaceID ?? workspace,
})

View File

@@ -46,7 +46,7 @@ export type Properties<Def extends Definition = Definition> = EffectSchema.Schem
export type SerializedEvent<Def extends Definition = Definition> = Event<Def> & { type: string }
type ProjectorFunc = (db: Database.TxOrDb, data: unknown) => void
type ProjectorFunc = (db: Database.TxOrDb, data: unknown, event: Event) => void
type ConvertEvent = (type: string, data: Event["data"]) => unknown | Promise<unknown>
type PublishContext = {
instance?: InstanceContext
@@ -255,7 +255,7 @@ export function define<
export function project<Def extends Definition>(
def: Def,
func: (db: Database.TxOrDb, data: Event<Def>["data"]) => void,
func: (db: Database.TxOrDb, data: Event<Def>["data"], event: Event<Def>) => void,
): [Definition, ProjectorFunc] {
return [def, func as ProjectorFunc]
}
@@ -277,7 +277,7 @@ function process<Def extends Definition>(
// idempotent: need to ignore any events already logged
Database.transaction((tx) => {
projector(tx, event.data)
projector(tx, event.data, event)
if (Flag.OPENCODE_EXPERIMENTAL_WORKSPACES) {
tx.insert(EventSequenceTable)
@@ -308,7 +308,7 @@ function process<Def extends Definition>(
}
const result = convertEvent(def.type, event.data)
const publish = (data: unknown) => ProjectBus.publish(def, data as Properties<Def>)
const publish = (data: unknown) => ProjectBus.publish(def, data as Properties<Def>, { id: event.id })
if (result instanceof Promise) {
void result.then(publish)
} else {

View File

@@ -1,7 +1,7 @@
import { PlanExitTool } from "./plan"
import { Session } from "@/session/session"
import { QuestionTool } from "./question"
import { BashTool } from "./bash"
import { ShellTool } from "./shell"
import { EditTool } from "./edit"
import { GlobTool } from "./glob"
import { GrepTool } from "./grep"
@@ -106,7 +106,7 @@ export const layer: Layer.Layer<
const plan = yield* PlanExitTool
const webfetch = yield* WebFetchTool
const websearch = yield* WebSearchTool
const bash = yield* BashTool
const shell = yield* ShellTool
const globtool = yield* GlobTool
const writetool = yield* WriteTool
const edit = yield* EditTool
@@ -195,7 +195,7 @@ export const layer: Layer.Layer<
const tool = yield* Effect.all({
invalid: Tool.init(invalid),
bash: Tool.init(bash),
shell: Tool.init(shell),
read: Tool.init(read),
glob: Tool.init(globtool),
grep: Tool.init(greptool),
@@ -217,7 +217,7 @@ export const layer: Layer.Layer<
builtin: [
tool.invalid,
...(questionEnabled ? [tool.question] : []),
tool.bash,
tool.shell,
tool.read,
tool.glob,
tool.grep,

View File

@@ -1,12 +1,11 @@
import { Schema } from "effect"
import { PositiveInt } from "@/util/schema"
import { Effect, Stream } from "effect"
import os from "os"
import { createWriteStream } from "node:fs"
import * as Tool from "./tool"
import path from "path"
import DESCRIPTION from "./bash.txt"
import * as Log from "@opencode-ai/core/util/log"
import { containsPath, type InstanceContext } from "../project/instance-context"
import { InstanceState } from "@/effect/instance-state"
import { lazy } from "@/util/lazy"
import { Language, type Node } from "web-tree-sitter"
@@ -14,20 +13,21 @@ import { AppFileSystem } from "@opencode-ai/core/filesystem"
import { fileURLToPath } from "url"
import { Config } from "@/config/config"
import { Flag } from "@opencode-ai/core/flag/flag"
import { Global } from "@opencode-ai/core/global"
import { Shell } from "@/shell/shell"
import { ShellKind, ShellToolID } from "./shell/id"
import { BashArity } from "@/permission/arity"
import * as Truncate from "./truncate"
import { Plugin } from "@/plugin"
import { Effect, Stream } from "effect"
import { ChildProcess } from "effect/unstable/process"
import { ChildProcessSpawner } from "effect/unstable/process/ChildProcessSpawner"
import { InstanceState } from "@/effect/instance-state"
import { ShellPrompt, type Parameters } from "./shell/prompt"
import { BashArity } from "@/permission/arity"
export { Parameters } from "./shell/prompt"
const MAX_METADATA_LENGTH = 30_000
const DEFAULT_TIMEOUT = Flag.OPENCODE_EXPERIMENTAL_BASH_DEFAULT_TIMEOUT_MS || 2 * 60 * 1000
const CWD = new Set(["cd", "push-location", "set-location"])
const CWD = new Set(["cd", "chdir", "popd", "pushd", "push-location", "set-location"])
const FILES = new Set([
...CWD,
"rm",
@@ -50,21 +50,10 @@ const FILES = new Set([
"new-item",
"rename-item",
])
const CMD_FILES = new Set(["copy", "del", "dir", "erase", "md", "mkdir", "move", "rd", "ren", "rename", "rmdir", "type"])
const FLAGS = new Set(["-destination", "-literalpath", "-path"])
const SWITCHES = new Set(["-confirm", "-debug", "-force", "-nonewline", "-recurse", "-verbose", "-whatif"])
export const Parameters = Schema.Struct({
command: Schema.String.annotate({ description: "The command to execute" }),
timeout: Schema.optional(PositiveInt).annotate({ description: "Optional timeout in milliseconds" }),
workdir: Schema.optional(Schema.String).annotate({
description: `The working directory to run the command in. Defaults to the current directory. Use this instead of 'cd' commands.`,
}),
description: Schema.String.annotate({
description:
"Clear, concise description of what this command does in 5-10 words. Examples:\nInput: ls\nOutput: Lists files in current directory\n\nInput: git status\nOutput: Shows working tree status\n\nInput: npm install\nOutput: Installs package dependencies\n\nInput: mkdir foo\nOutput: Creates directory 'foo'",
}),
})
type Part = {
type: string
text: string
@@ -81,7 +70,7 @@ type Chunk = {
size: number
}
export const log = Log.create({ service: "bash-tool" })
export const log = Log.create({ service: "shell-tool" })
const resolveWasm = (asset: string) => {
if (asset.startsWith("file://")) return fileURLToPath(asset)
@@ -187,11 +176,16 @@ function prefix(text: string) {
return text.slice(0, match.index)
}
function pathArgs(list: Part[], ps: boolean) {
function pathArgs(list: Part[], ps: boolean, cmd = false) {
if (!ps) {
return list
.slice(1)
.filter((item) => !item.text.startsWith("-") && !(list[0]?.text === "chmod" && item.text.startsWith("+")))
.filter(
(item) =>
!item.text.startsWith("-") &&
!(cmd && item.text.startsWith("/")) &&
!(list[0]?.text === "chmod" && item.text.startsWith("+")),
)
.map((item) => item.text)
}
@@ -251,13 +245,13 @@ function tail(text: string, maxLines: number, maxBytes: number) {
}
}
const parse = Effect.fn("BashTool.parse")(function* (command: string, ps: boolean) {
const parse = Effect.fn("ShellTool.parse")(function* (command: string, ps: boolean) {
const tree = yield* Effect.promise(() => parser().then((p) => (ps ? p.ps : p.bash).parse(command)))
if (!tree) throw new Error("Failed to parse command")
return tree
})
const ask = Effect.fn("BashTool.ask")(function* (ctx: Tool.Context, scan: Scan) {
const ask = Effect.fn("ShellTool.ask")(function* (ctx: Tool.Context, scan: Scan) {
if (scan.dirs.size > 0) {
const globs = Array.from(scan.dirs).map((dir) => {
if (process.platform === "win32") return AppFileSystem.normalizePathPattern(path.join(dir, "*"))
@@ -273,7 +267,7 @@ const ask = Effect.fn("BashTool.ask")(function* (ctx: Tool.Context, scan: Scan)
if (scan.patterns.size === 0) return
yield* ctx.ask({
permission: "bash",
permission: ShellToolID.id,
patterns: Array.from(scan.patterns),
always: Array.from(scan.always),
metadata: {},
@@ -325,9 +319,8 @@ const parser = lazy(async () => {
return { bash, ps }
})
// TODO: we may wanna rename this tool so it works better on other shells
export const BashTool = Tool.define(
"bash",
export const ShellTool = Tool.define(
ShellToolID.id,
Effect.gen(function* () {
const config = yield* Config.Service
const spawner = yield* ChildProcessSpawner
@@ -335,7 +328,7 @@ export const BashTool = Tool.define(
const trunc = yield* Truncate.Service
const plugin = yield* Plugin.Service
const cygpath = Effect.fn("BashTool.cygpath")(function* (shell: string, text: string) {
const cygpath = Effect.fn("ShellTool.cygpath")(function* (shell: string, text: string) {
const lines = yield* spawner
.lines(ChildProcess.make(shell, ["-lc", 'cygpath -w -- "$1"', "_", text]))
.pipe(Effect.catch(() => Effect.succeed([] as string[])))
@@ -344,7 +337,7 @@ export const BashTool = Tool.define(
return AppFileSystem.normalizePath(file)
})
const resolvePath = Effect.fn("BashTool.resolvePath")(function* (text: string, root: string, shell: string) {
const resolvePath = Effect.fn("ShellTool.resolvePath")(function* (text: string, root: string, shell: string) {
if (process.platform === "win32") {
if (Shell.posix(shell) && text.startsWith("/") && AppFileSystem.windowsPath(text) === text) {
const file = yield* cygpath(shell, text)
@@ -355,7 +348,7 @@ export const BashTool = Tool.define(
return path.resolve(root, text)
})
const argPath = Effect.fn("BashTool.argPath")(function* (arg: string, cwd: string, ps: boolean, shell: string) {
const argPath = Effect.fn("ShellTool.argPath")(function* (arg: string, cwd: string, ps: boolean, shell: string) {
const text = ps ? expand(arg, cwd, shell) : home(unquote(arg))
const file = text && prefix(text)
if (!file || dynamic(file, ps)) return
@@ -364,7 +357,7 @@ export const BashTool = Tool.define(
return yield* resolvePath(next, cwd, shell)
})
const collect = Effect.fn("BashTool.collect")(function* (
const collect = Effect.fn("ShellTool.collect")(function* (
root: Node,
cwd: string,
ps: boolean,
@@ -376,14 +369,15 @@ export const BashTool = Tool.define(
patterns: new Set<string>(),
always: new Set<string>(),
}
const shellKind = ShellKind.from(Shell.name(shell))
for (const node of commands(root)) {
const command = parts(node)
const tokens = command.map((item) => item.text)
const cmd = ps ? tokens[0]?.toLowerCase() : tokens[0]
const cmd = ps || shellKind === "cmd" ? tokens[0]?.toLowerCase() : tokens[0]
if (cmd && FILES.has(cmd)) {
for (const arg of pathArgs(command, ps)) {
if (cmd && (FILES.has(cmd) || (shellKind === "cmd" && CMD_FILES.has(cmd)))) {
for (const arg of pathArgs(command, ps, shellKind === "cmd")) {
const resolved = yield* argPath(arg, cwd, ps, shell)
log.info("resolved path", { arg, resolved })
if (!resolved || containsPath(resolved, instance)) continue
@@ -401,7 +395,7 @@ export const BashTool = Tool.define(
return scan
})
const shellEnv = Effect.fn("BashTool.shellEnv")(function* (ctx: Tool.Context, cwd: string) {
const shellEnv = Effect.fn("ShellTool.shellEnv")(function* (ctx: Tool.Context, cwd: string) {
const extra = yield* plugin.trigger(
"shell.env",
{ cwd, sessionID: ctx.sessionID, callID: ctx.callID },
@@ -413,7 +407,7 @@ export const BashTool = Tool.define(
}
})
const run = Effect.fn("BashTool.run")(function* (
const run = Effect.fn("ShellTool.run")(function* (
input: {
shell: string
command: string
@@ -527,7 +521,7 @@ export const BashTool = Tool.define(
const meta: string[] = []
if (expired) {
meta.push(
`bash tool terminated command after exceeding timeout ${input.timeout} ms. If this command is expected to take longer and is not waiting for interactive input, retry with a larger timeout value in milliseconds.`,
`shell tool terminated command after exceeding timeout ${input.timeout} ms. If this command is expected to take longer and is not waiting for interactive input, retry with a larger timeout value in milliseconds.`,
)
}
if (aborted) meta.push("User aborted the command")
@@ -546,7 +540,7 @@ export const BashTool = Tool.define(
}
if (meta.length > 0) {
output += "\n\n<bash_metadata>\n" + meta.join("\n") + "\n</bash_metadata>"
output += "\n\n<shell_metadata>\n" + meta.join("\n") + "\n</shell_metadata>"
}
if (sink) {
const stream = sink
@@ -577,25 +571,14 @@ export const BashTool = Tool.define(
const cfg = yield* config.get()
const shell = Shell.acceptable(cfg.shell)
const name = Shell.name(shell)
const chain =
name === "powershell"
? "If the commands depend on each other and must run sequentially, avoid '&&' in this shell because Windows PowerShell 5.1 does not support it. Use PowerShell conditionals such as `cmd1; if ($?) { cmd2 }` when later commands must depend on earlier success."
: "If the commands depend on each other and must run sequentially, use a single Bash call with '&&' to chain them together (e.g., `git add . && git commit -m \"message\" && git push`). For instance, if one operation must complete before another starts (like mkdir before cp, Write before Bash for git operations, or git add before git commit), run these operations sequentially instead."
log.info("bash tool using shell", { shell })
const limits = yield* trunc.limits()
const instance = yield* InstanceState.context
const prompt = ShellPrompt.render(name, process.platform, limits)
log.info("shell tool using shell", { shell })
return {
description: DESCRIPTION.replaceAll("${directory}", instance.directory)
.replaceAll("${tmp}", Global.Path.tmp)
.replaceAll("${os}", process.platform)
.replaceAll("${shell}", name)
.replaceAll("${chaining}", chain)
.replaceAll("${maxLines}", String(limits.maxLines))
.replaceAll("${maxBytes}", String(limits.maxBytes)),
parameters: Parameters,
execute: (params: Schema.Schema.Type<typeof Parameters>, ctx: Tool.Context) =>
description: prompt.description,
parameters: prompt.parameters,
execute: (params: Parameters, ctx: Tool.Context) =>
Effect.gen(function* () {
const executeInstance = yield* InstanceState.context
const cwd = params.workdir

View File

@@ -0,0 +1,28 @@
/** Classifies which shell family a configured shell id belongs to. */
export namespace ShellKind {
  /** Every shell id the shell tool knows how to drive. */
  export const ids = ["bash", "pwsh", "powershell", "cmd"] as const
  export type ID = (typeof ids)[number]
  const known = new Set<string>(ids)
  const powershellIds = new Set<string>(["pwsh", "powershell"])
  /** Type guard: true when `value` is one of the known shell ids. */
  export function has(value: string): value is ID {
    return known.has(value)
  }
  /** Normalizes an arbitrary string to a known id, falling back to "bash". */
  export function from(value: string): ID {
    if (has(value)) return value
    return "bash"
  }
  /** True when the id names a PowerShell variant (pwsh or powershell). */
  export function powershell(value: string) {
    return powershellIds.has(value)
  }
}
/** The registered identifier for the shell tool. */
export namespace ShellToolID {
  // NOTE(review): the id stays "bash" even though the tool is now the shell
  // tool — presumably kept for compatibility with existing tool
  // registrations; confirm before renaming.
  export const id = "bash"
  export type ID = typeof id
  /** Type guard: true when `value` is exactly the shell tool id. */
  export function has(value: string): value is ID {
    return value === id
  }
}

View File

@@ -0,0 +1,299 @@
import { Schema } from "effect"
import DESCRIPTION from "./shell.txt"
import { PositiveInt } from "@/util/schema"
import { Global } from "@opencode-ai/core/global"
// Shell families sharing PowerShell semantics vs. cmd.exe semantics;
// anything outside both sets is treated as bash-like by profile().
const PS = new Set(["powershell", "pwsh"])
const CMD = new Set(["cmd"])
// Per-shell guidance for the tool-call `description` parameter, with the
// examples phrased in that shell's native commands. Note there is no
// separate `pwsh` entry: profile() reuses `powershell` for both variants.
const descriptions = {
  bash:
    "Clear, concise description of what this command does in 5-10 words. Examples:\nInput: ls\nOutput: Lists files in current directory\n\nInput: git status\nOutput: Shows working tree status\n\nInput: npm install\nOutput: Installs package dependencies\n\nInput: mkdir foo\nOutput: Creates directory 'foo'",
  powershell:
    'Clear, concise description of what this command does in 5-10 words. Examples:\nInput: Get-ChildItem -LiteralPath "."\nOutput: Lists current directory\n\nInput: git status\nOutput: Shows working tree status\n\nInput: npm install\nOutput: Installs package dependencies\n\nInput: New-Item -ItemType Directory -Path "tmp"\nOutput: Creates directory tmp',
  cmd:
    'Clear, concise description of what this command does in 5-10 words. Examples:\nInput: dir\nOutput: Lists current directory\n\nInput: if exist "package.json" type "package.json"\nOutput: Prints package.json when it exists\n\nInput: mkdir tmp\nOutput: Creates directory tmp',
}
// Output-truncation thresholds interpolated into the rendered prompt.
export type Limits = {
  maxLines: number
  maxBytes: number
}
/**
 * Builds the effect Schema for the shell tool's parameters.
 * `description` customizes the guidance shown for the tool-call
 * `description` field, which differs per shell family.
 */
export function parameterSchema(description: string) {
  const workdirHelp = `The working directory to run the command in. Defaults to the current directory. Use this instead of 'cd' commands.`
  return Schema.Struct({
    command: Schema.String.annotate({ description: "The command to execute" }),
    timeout: Schema.optional(PositiveInt).annotate({ description: "Optional timeout in milliseconds" }),
    workdir: Schema.optional(Schema.String).annotate({ description: workdirHelp }),
    description: Schema.String.annotate({ description }),
  })
}
// Default parameter schema, using the bash-flavored description examples.
export const Parameters = parameterSchema(descriptions.bash)
export type Parameters = Schema.Schema.Type<typeof Parameters>
/**
 * Substitutes every `${key}` placeholder in `template` with the matching
 * entry from `values`. Throws when the template references a key that
 * `values` does not supply, so a missing prompt fragment fails loudly
 * instead of rendering literally.
 */
function renderPrompt(template: string, values: Record<string, string>) {
  const substitute = (_match: string, key: string) => {
    const replacement = values[key]
    if (replacement === undefined) throw new Error(`Missing shell prompt value: ${key}`)
    return replacement
  }
  return template.replace(/\$\{(\w+)\}/g, substitute)
}
/** Maps a shell id to its human-readable display name; unknown ids pass through unchanged. */
function shellDisplayName(name: string) {
  switch (name) {
    case "pwsh":
      return "PowerShell (7+)"
    case "powershell":
      return "Windows PowerShell (5.1)"
    case "cmd":
      return "cmd.exe"
    default:
      return name
  }
}
/**
 * Shell-specific usage notes prepended to the PowerShell command section.
 * Returns an empty string for any shell that is not a PowerShell variant.
 */
function powershellNotes(name: string) {
  switch (name) {
    case "pwsh":
      return `# PowerShell (7+) shell notes
- This cross-platform shell supports pipeline chain operators (\`&&\` and \`||\`).
- Use double quotes for interpolated strings (\`"Hello $name"\`), single quotes for verbatim strings.
- Prefer full cmdlet names like \`Get-ChildItem\`, \`Set-Content\`, \`Remove-Item\`, and \`New-Item\` over aliases.
- Use \`$(...)\` for subexpressions. Use \`@(...)\` for array expressions.
- To call a native executable whose path contains spaces, use the call operator: \`& "path/to/exe" args\`.
- Escape special characters with the PowerShell backtick character.`
    case "powershell":
      return `# Windows PowerShell (5.1) shell notes
- Use \`cmd1; if ($?) { cmd2 }\` to chain dependent commands.
- Use double quotes for interpolated strings (\`"Hello $name"\`), single quotes for verbatim strings.
- Prefer full cmdlet names like \`Get-ChildItem\`, \`Set-Content\`, \`Remove-Item\`, and \`New-Item\` over aliases.
- Use \`$(...)\` for subexpressions. Use \`@(...)\` for array expressions.
- To call a native executable whose path contains spaces, use the call operator: \`& "path/to/exe" args\`.
- Escape special characters with the PowerShell backtick character.`
    default:
      return ""
  }
}
/**
 * Picks the command-chaining guidance for a shell. Windows PowerShell 5.1
 * gets the no-`&&` advice, pwsh and cmd get `&&`-based advice phrased in
 * their own syntax, and everything else falls back to the bash wording.
 */
function chainGuidance(name: string) {
  switch (name) {
    case "powershell":
      return "If the commands depend on each other and must run sequentially, avoid '&&' in this shell because Windows PowerShell (5.1) does not support it. Use PowerShell conditionals such as `cmd1; if ($?) { cmd2 }` when later commands must depend on earlier success."
    case "pwsh":
      return "If the commands depend on each other and must run sequentially, use a single Shell call with '&&' to chain them together (e.g., `git add . && git commit -m \"message\" && git push`). For instance, if one operation must complete before another starts (like New-Item before Copy-Item, Write before Shell for git operations, or git add before git commit), run these operations sequentially instead."
    case "cmd":
      return "If the commands depend on each other and must run sequentially, use a single Shell call with `&&` to chain them together (e.g., `mkdir out && dir out`). For instance, if one operation must complete before another starts, run these operations sequentially instead."
    default:
      return "If the commands depend on each other and must run sequentially, use a single Bash call with '&&' to chain them together (e.g., `git add . && git commit -m \"message\" && git push`). For instance, if one operation must complete before another starts (like mkdir before cp, Write before Bash for git operations, or git add before git commit), run these operations sequentially instead."
  }
}
// Renders the bash-specific command-execution section of the prompt:
// directory-verification steps, path-quoting rules, usage notes (including
// the `limits` truncation thresholds and dedicated-tool substitutions),
// and the shell-appropriate chaining guidance supplied via `chain`.
function bashCommandSection(chain: string, limits: Limits) {
  return `Before executing the command, please follow these steps:
1. Directory Verification:
- If the command will create new directories or files, first use \`ls\` to verify the parent directory exists and is the correct location
- For example, before running "mkdir foo/bar", first use \`ls foo\` to check that "foo" exists and is the intended parent directory
2. Command Execution:
- Always quote file paths that contain spaces with double quotes (e.g., rm "path with spaces/file.txt")
- Examples of proper quoting:
- mkdir "/Users/name/My Documents" (correct)
- mkdir /Users/name/My Documents (incorrect - will fail)
- python "/path/with spaces/script.py" (correct)
- python /path/with spaces/script.py (incorrect - will fail)
- After ensuring proper quoting, execute the command.
- Capture the output of the command.
Usage notes:
- The command argument is required.
- You can specify an optional timeout in milliseconds. If not specified, commands will time out after 120000ms (2 minutes).
- It is very helpful if you write a clear, concise description of what this command does in 5-10 words.
- If the output exceeds ${limits.maxLines} lines or ${limits.maxBytes} bytes, it will be truncated and the full output will be written to a file. You can use Read with offset/limit to read specific sections or Grep to search the full content. Do NOT use \`head\`, \`tail\`, or other truncation commands to limit output; the full output will already be captured to a file for more precise searching.
- Avoid using Bash with the \`find\`, \`grep\`, \`cat\`, \`head\`, \`tail\`, \`sed\`, \`awk\`, or \`echo\` commands, unless explicitly instructed or when these commands are truly necessary for the task. Instead, always prefer using the dedicated tools for these commands:
- File search: Use Glob (NOT find or ls)
- Content search: Use Grep (NOT grep or rg)
- Read files: Use Read (NOT cat/head/tail)
- Edit files: Use Edit (NOT sed/awk)
- Write files: Use Write (NOT echo >/cat <<EOF)
- Communication: Output text directly (NOT echo/printf)
- When issuing multiple commands:
- If the commands are independent and can run in parallel, make multiple Shell tool calls in a single message. For example, if you need to run "git status" and "git diff", send a single message with two Shell tool calls in parallel.
- ${chain}
- Use ';' only when you need to run commands sequentially but don't care if earlier commands fail
- DO NOT use newlines to separate commands (newlines are ok in quoted strings)
- AVOID using \`cd <directory> && <command>\`. Use the \`workdir\` parameter to change directories instead.
<good-example>
Use workdir="/foo/bar" with command: pytest tests
</good-example>
<bad-example>
cd /foo/bar && pytest tests
</bad-example>`
}
// Renders the PowerShell-flavored command-execution section: the variant's
// shell notes, Test-Path based directory verification, quoting rules
// (paths built with the platform separator `pathSep`), usage notes with
// the `limits` truncation thresholds, and chaining guidance via `chain`.
// The bad-example adapts its chaining syntax to 5.1 vs 7+ (`name`).
function powershellCommandSection(name: string, chain: string, pathSep: string, limits: Limits) {
  return `${powershellNotes(name)}
Before executing the command, please follow these steps:
1. Directory Verification:
- If the command will create new directories or files, first use \`Test-Path -LiteralPath <parent>\` to verify the parent directory exists and is the correct location
- For example, before creating \`foo${pathSep}bar\`, first use \`Test-Path -LiteralPath "foo"\` to check that \`foo\` exists and is the intended parent directory
2. Command Execution:
- Always quote file paths that contain spaces with double quotes (e.g., Remove-Item -LiteralPath "path with spaces${pathSep}file.txt")
- Examples of proper quoting:
- New-Item -ItemType Directory -Path "My Documents" (correct)
- New-Item -ItemType Directory -Path My Documents (incorrect - path is split)
- & "path with spaces${pathSep}script.ps1" (correct)
- path with spaces${pathSep}script.ps1 (incorrect - path is split and not invoked)
- After ensuring proper quoting, execute the command.
- Capture the output of the command.
Usage notes:
- The command argument is required.
- You can specify an optional timeout in milliseconds. If not specified, commands will time out after 120000ms (2 minutes).
- It is very helpful if you write a clear, concise description of what this command does in 5-10 words.
- If the output exceeds ${limits.maxLines} lines or ${limits.maxBytes} bytes, it will be truncated and the full output will be written to a file. You can use Read with offset/limit to read specific sections or Grep to search the full content. Do NOT use \`Select-Object -First\`, \`Select-Object -Last\`, or other truncation commands to limit output; the full output will already be captured to a file for more precise searching.
- Avoid using Shell with PowerShell file/content cmdlets unless explicitly instructed or when these cmdlets are truly necessary for the task. Instead, always prefer using the dedicated tools for these commands:
- File search: Use Glob (NOT Get-ChildItem)
- Content search: Use Grep (NOT Select-String)
- Read files: Use Read (NOT Get-Content)
- Edit files: Use Edit (NOT Set-Content)
- Write files: Use Write (NOT Set-Content/Out-File or here-strings)
- Communication: Output text directly (NOT Write-Output/Write-Host)
- When issuing multiple commands:
- If the commands are independent and can run in parallel, make multiple Shell tool calls in a single message. For example, if you need to run "git status" and "git diff", send a single message with two Shell tool calls in parallel.
- ${chain}
- Use \`;\` only when you need to run commands sequentially but don't care if earlier commands fail
- DO NOT use newlines to separate commands (newlines are ok in quoted strings)
- AVOID changing directories inside the command. Use the \`workdir\` parameter to change directories instead.
<good-example>
Use workdir="project${pathSep}subdir" with command: pytest tests
</good-example>
<bad-example>
${name === "powershell" ? `Set-Location -LiteralPath "project${pathSep}subdir"; if ($?) { pytest tests }` : `Set-Location -LiteralPath "project${pathSep}subdir" && pytest tests`}
</bad-example>`
}
// Renders the cmd.exe command-execution section: cmd.exe shell notes,
// `if exist` based directory verification, quoting rules, usage notes
// with the `limits` truncation thresholds, and chaining guidance via
// `chain` (cmd uses `&`/`&&` rather than `;`).
function cmdCommandSection(chain: string, limits: Limits) {
  return `# cmd.exe shell notes
- Use double quotes for paths with spaces.
- Use %VAR% for environment variables.
- Use \`if exist\` for existence checks.
- Use \`call\` when invoking batch files from another batch-style command.
Before executing the command, please follow these steps:
1. Directory Verification:
- If the command will create new directories or files, first use \`if exist\` to verify the parent directory exists and is the correct location
- For example, before creating \`foo\\bar\`, first use \`if exist "foo\\" dir "foo"\` to check that \`foo\` exists and is the intended parent directory
2. Command Execution:
- Always quote file paths that contain spaces with double quotes (e.g., del "path with spaces\\file.txt")
- Examples of proper quoting:
- mkdir "My Documents" (correct)
- mkdir My Documents (incorrect - path is split)
- call "path with spaces\\script.bat" (correct)
- path with spaces\\script.bat (incorrect - path is split and not invoked correctly)
- After ensuring proper quoting, execute the command.
- Capture the output of the command.
Usage notes:
- The command argument is required.
- You can specify an optional timeout in milliseconds. If not specified, commands will time out after 120000ms (2 minutes).
- It is very helpful if you write a clear, concise description of what this command does in 5-10 words.
- If the output exceeds ${limits.maxLines} lines or ${limits.maxBytes} bytes, it will be truncated and the full output will be written to a file. You can use Read with offset/limit to read specific sections or Grep to search the full content. Do NOT use \`more\` or other pagination commands to limit output; the full output will already be captured to a file for more precise searching.
- Avoid using Shell with cmd.exe file/content commands unless explicitly instructed or when these commands are truly necessary for the task. Instead, always prefer using the dedicated tools for these commands:
- File search: Use Glob (NOT dir /s)
- Content search: Use Grep (NOT findstr)
- Read files: Use Read (NOT type)
- Edit files: Use Edit (NOT copy)
- Write files: Use Write (NOT echo > file)
- Communication: Output text directly (NOT echo)
- When issuing multiple commands:
- If the commands are independent and can run in parallel, make multiple Shell tool calls in a single message. For example, if you need to run "dir" and "where cmd", send a single message with two Shell tool calls in parallel.
- ${chain}
- Use \`&\` only when you need to run commands sequentially but don't care if earlier commands fail
- DO NOT use newlines to separate commands (newlines are ok in quoted strings)
- AVOID changing directories inside the command. Use the \`workdir\` parameter to change directories instead.
<good-example>
Use workdir="project\\subdir" with command: dir
</good-example>
<bad-example>
cd /d "project\\subdir" && dir
</bad-example>`
}
/**
 * Selects the per-shell prompt fragments (intro, workdir guidance, command
 * section, git/PR instructions, and parameter description) for the given
 * shell. Falls through to the bash profile for any shell that is not cmd
 * or a PowerShell variant.
 *
 * Fix: the bash `createPrExample` previously rendered an unterminated
 * `$(cat <<'EOF'` heredoc (the cmd and powershell examples are complete
 * commands); it now closes the heredoc and command substitution so the
 * example is valid shell.
 */
function profile(name: string, platform: NodeJS.Platform, limits: Limits) {
  const isPowerShell = PS.has(name)
  const chain = chainGuidance(name)
  if (CMD.has(name)) {
    return {
      intro: `Executes a given ${shellDisplayName(name)} command with optional timeout, ensuring proper handling and security measures.`,
      workdirSection:
        "All commands run in the current working directory by default. Use the `workdir` parameter if you need to run a command in a different directory. AVOID changing directories inside the command - use `workdir` instead.",
      commandSection: cmdCommandSection(chain, limits),
      gitCommands: "git commands",
      toolName: "Shell",
      gitCommandRestriction: "git commands",
      // cmd.exe quoting is fragile, so the example routes the PR body through a temp file.
      createPrInstruction: "Create PR using a temporary body file so cmd.exe quoting stays simple.",
      createPrExample: `(\n echo ## Summary\n echo - ^<1-3 bullet points^>\n) > pr-body.txt\ngh pr create --title "the pr title" --body-file pr-body.txt`,
      parameterDescription: descriptions.cmd,
    }
  }
  if (isPowerShell) {
    return {
      intro: `Executes a given ${shellDisplayName(name)} command with optional timeout, ensuring proper handling and security measures.`,
      workdirSection:
        "All commands run in the current working directory by default. Use the `workdir` parameter if you need to run a command in a different directory. AVOID changing directories inside the command - use `workdir` instead.",
      commandSection: powershellCommandSection(name, chain, platform === "win32" ? "\\" : "/", limits),
      gitCommands: "git commands",
      toolName: "Shell",
      gitCommandRestriction: "git commands",
      createPrInstruction: "Create PR using gh pr create with a PowerShell here-string to pass the body correctly.",
      createPrExample: `gh pr create --title "the pr title" --body @'
## Summary
- <1-3 bullet points>
'@`,
      // pwsh and powershell share one description: both use PowerShell syntax.
      parameterDescription: descriptions.powershell,
    }
  }
  return {
    intro:
      "Executes a given bash command in a persistent shell session with optional timeout, ensuring proper handling and security measures.",
    workdirSection:
      "All commands run in the current working directory by default. Use the `workdir` parameter if you need to run a command in a different directory. AVOID using `cd <directory> && <command>` patterns - use `workdir` instead.",
    commandSection: bashCommandSection(chain, limits),
    gitCommands: "bash commands",
    toolName: "Shell",
    gitCommandRestriction: "git bash commands",
    createPrInstruction:
      "Create PR using gh pr create with the format below. Use a HEREDOC to pass the body to ensure correct formatting.",
    createPrExample: `gh pr create --title "the pr title" --body "$(cat <<'EOF'
## Summary
<1-3 bullet points>
EOF
)"`,
    parameterDescription: descriptions.bash,
  }
}
/**
 * Produces the shell tool's rendered description and parameter schema for
 * the given shell name, platform, and output-truncation limits. Pulls the
 * per-shell fragments from profile() and substitutes them into the shared
 * shell.txt template via renderPrompt().
 */
export function render(name: string, platform: NodeJS.Platform, limits: Limits) {
  const selected = profile(name, platform, limits)
  const values: Record<string, string> = {
    intro: selected.intro,
    os: platform,
    shell: name,
    tmp: Global.Path.tmp,
    workdirSection: selected.workdirSection,
    commandSection: selected.commandSection,
    gitCommands: selected.gitCommands,
    toolName: selected.toolName,
    gitCommandRestriction: selected.gitCommandRestriction,
    createPrInstruction: selected.createPrInstruction,
    createPrExample: selected.createPrExample,
  }
  return {
    description: renderPrompt(DESCRIPTION, values),
    parameters: parameterSchema(selected.parameterDescription),
  }
}
export * as ShellPrompt from "./prompt"

View File

@@ -1,54 +1,14 @@
Executes a given bash command in a persistent shell session with optional timeout, ensuring proper handling and security measures.
${intro}
Be aware: OS: ${os}, Shell: ${shell}
All commands run in the current working directory by default. Use the `workdir` parameter if you need to run a command in a different directory. AVOID using `cd <directory> && <command>` patterns - use `workdir` instead.
${workdirSection}
Use `${tmp}` for temporary work outside the workspace. This directory has already been created, already exists, and is pre-approved for external directory access.
IMPORTANT: This tool is for terminal operations like git, npm, docker, etc. DO NOT use it for file operations (reading, writing, editing, searching, finding files) - use the specialized tools for this instead.
Before executing the command, please follow these steps:
1. Directory Verification:
- If the command will create new directories or files, first use `ls` to verify the parent directory exists and is the correct location
- For example, before running "mkdir foo/bar", first use `ls foo` to check that "foo" exists and is the intended parent directory
2. Command Execution:
- Always quote file paths that contain spaces with double quotes (e.g., rm "path with spaces/file.txt")
- Examples of proper quoting:
- mkdir "/Users/name/My Documents" (correct)
- mkdir /Users/name/My Documents (incorrect - will fail)
- python "/path/with spaces/script.py" (correct)
- python /path/with spaces/script.py (incorrect - will fail)
- After ensuring proper quoting, execute the command.
- Capture the output of the command.
Usage notes:
- The command argument is required.
- You can specify an optional timeout in milliseconds. If not specified, commands will time out after 120000ms (2 minutes).
- It is very helpful if you write a clear, concise description of what this command does in 5-10 words.
- If the output exceeds ${maxLines} lines or ${maxBytes} bytes, it will be truncated and the full output will be written to a file. You can use Read with offset/limit to read specific sections or Grep to search the full content. Do NOT use `head`, `tail`, or other truncation commands to limit output; the full output will already be captured to a file for more precise searching.
- Avoid using Bash with the `find`, `grep`, `cat`, `head`, `tail`, `sed`, `awk`, or `echo` commands, unless explicitly instructed or when these commands are truly necessary for the task. Instead, always prefer using the dedicated tools for these commands:
- File search: Use Glob (NOT find or ls)
- Content search: Use Grep (NOT grep or rg)
- Read files: Use Read (NOT cat/head/tail)
- Edit files: Use Edit (NOT sed/awk)
- Write files: Use Write (NOT echo >/cat <<EOF)
- Communication: Output text directly (NOT echo/printf)
- When issuing multiple commands:
- If the commands are independent and can run in parallel, make multiple Bash tool calls in a single message. For example, if you need to run "git status" and "git diff", send a single message with two Bash tool calls in parallel.
- ${chaining}
- Use ';' only when you need to run commands sequentially but don't care if earlier commands fail
- DO NOT use newlines to separate commands (newlines are ok in quoted strings)
- AVOID using `cd <directory> && <command>`. Use the `workdir` parameter to change directories instead.
<good-example>
Use workdir="/foo/bar" with command: pytest tests
</good-example>
<bad-example>
cd /foo/bar && pytest tests
</bad-example>
${commandSection}
# Committing changes with git
@@ -67,7 +27,7 @@ Git Safety Protocol:
- CRITICAL: If you already pushed to remote, NEVER amend unless user explicitly requests it (requires force push)
- NEVER commit changes unless the user explicitly asks you to. It is VERY IMPORTANT to only commit when explicitly asked, otherwise the user will feel that you are being too proactive.
1. You can call multiple tools in a single response. When multiple independent pieces of information are requested and all commands are likely to succeed, run multiple tool calls in parallel for optimal performance. run the following bash commands in parallel, each using the Bash tool:
1. You can call multiple tools in a single response. When multiple independent pieces of information are requested and all commands are likely to succeed, run multiple tool calls in parallel for optimal performance. run the following ${gitCommands} in parallel, each using the ${toolName} tool:
- Run a git status command to see all untracked files.
- Run a git diff command to see both staged and unstaged changes that will be committed.
- Run a git log command to see recent commit messages, so that you can follow this repository's commit message style.
@@ -84,18 +44,18 @@ Git Safety Protocol:
4. If the commit fails due to pre-commit hook, fix the issue and create a NEW commit (see amend rules above)
Important notes:
- NEVER run additional commands to read or explore code, besides git bash commands
- NEVER run additional commands to read or explore code, besides ${gitCommandRestriction}
- NEVER use the TodoWrite or Task tools
- DO NOT push to the remote repository unless the user explicitly asks you to do so
- IMPORTANT: Never use git commands with the -i flag (like git rebase -i or git add -i) since they require interactive input which is not supported.
- If there are no changes to commit (i.e., no untracked files and no modifications), do not create an empty commit
# Creating pull requests
Use the gh command via the Bash tool for ALL GitHub-related tasks including working with issues, pull requests, checks, and releases. If given a GitHub URL use the gh command to get the information needed.
Use the gh command via the ${toolName} tool for ALL GitHub-related tasks including working with issues, pull requests, checks, and releases. If given a GitHub URL use the gh command to get the information needed.
IMPORTANT: When the user asks you to create a pull request, follow these steps carefully:
1. You can call multiple tools in a single response. When multiple independent pieces of information are requested and all commands are likely to succeed, run multiple tool calls in parallel for optimal performance. run the following bash commands in parallel using the Bash tool, in order to understand the current state of the branch since it diverged from the main branch:
1. You can call multiple tools in a single response. When multiple independent pieces of information are requested and all commands are likely to succeed, run multiple tool calls in parallel for optimal performance. run the following ${gitCommands} in parallel using the ${toolName} tool, in order to understand the current state of the branch since it diverged from the main branch:
- Run a git status command to see all untracked files
- Run a git diff command to see both staged and unstaged changes that will be committed
- Check if the current branch tracks a remote branch and is up to date with the remote, so you know if you need to push to the remote
@@ -104,11 +64,9 @@ IMPORTANT: When the user asks you to create a pull request, follow these steps c
3. You can call multiple tools in a single response. When multiple independent pieces of information are requested and all commands are likely to succeed, run multiple tool calls in parallel for optimal performance. run the following commands in parallel:
- Create new branch if needed
- Push to remote with -u flag if needed
- Create PR using gh pr create with the format below. Use a HEREDOC to pass the body to ensure correct formatting.
- ${createPrInstruction}
<example>
gh pr create --title "the pr title" --body "$(cat <<'EOF'
## Summary
<1-3 bullet points>
${createPrExample}
</example>
Important:

View File

@@ -90,7 +90,7 @@ function bodyWithChecks(ast: SchemaAST.AST): z.ZodTypeAny {
// Schema.withDecodingDefault also attaches encoding, but we want `.default(v)`
// on the inner Zod rather than a transform wrapper — so optional ASTs whose
// encoding resolves a default from Option.none() route through body()/opt().
const hasEncoding = ast.encoding?.length && ast._tag !== "Declaration"
const hasEncoding = ast.encoding?.length && (ast._tag !== "Declaration" || ast.typeParameters.length === 0)
const hasTransform = hasEncoding && !(SchemaAST.isOptional(ast) && extractDefault(ast) !== undefined)
const base = hasTransform ? encoded(ast) : body(ast)
return ast.checks?.length ? applyChecks(base, ast.checks, ast) : base
@@ -256,6 +256,8 @@ function body(ast: SchemaAST.AST): z.ZodTypeAny {
return array(ast)
case "Declaration":
return decl(ast)
case "Suspend":
return z.lazy(() => walk(ast.thunk()))
default:
return fail(ast)
}

View File

@@ -0,0 +1,53 @@
import { Identifier } from "@/id/id"
import { SyncEvent } from "@/sync"
import { withStatics } from "@/util/schema"
import { Flag } from "@opencode-ai/core/flag/flag"
import * as Schema from "effect/Schema"
// Branded string identifier for v2 events. `ID.create()` mints a new id
// with the "evt" prefix; "ascending" presumably yields sortable,
// time-ordered ids — confirm against Identifier.create's semantics.
export const ID = Schema.String.pipe(
  Schema.brand("Event.ID"),
  withStatics((s) => ({
    create: () => s.make(Identifier.create("evt", "ascending")),
  })),
)
export type ID = Schema.Schema.Type<typeof ID>
/**
 * Defines a v2 event: a payload schema (id, optional metadata record, a
 * literal `type` tag, and a `data` struct built from `input.schema`) plus
 * a companion SyncEvent definition derived from the same data fields.
 * The returned schema carries the sync definition and the raw version /
 * aggregate inputs as statics.
 */
export function define<const Type extends string, Fields extends Schema.Struct.Fields>(input: {
  type: Type
  schema: Fields
  aggregate: string
  // Optional schema version; the SyncEvent definition defaults it to 1.
  version?: number
}) {
  const Payload = Schema.Struct({
    id: ID,
    metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional),
    type: Schema.Literal(input.type),
    data: Schema.Struct(input.schema),
  }).annotate({
    // Use the event type as the schema identifier annotation.
    identifier: input.type,
  })
  const Sync = SyncEvent.define({
    type: input.type,
    version: input.version ?? 1,
    aggregate: input.aggregate,
    // Reuse the exact data struct from the payload so both stay in sync.
    schema: Payload.fields.data,
  })
  // Note: `version` is exposed as passed in (possibly undefined), while the
  // Sync definition above carries the defaulted value.
  return Object.assign(Payload, {
    Sync,
    version: input.version,
    aggregate: input.aggregate,
  })
}
/**
 * Forwards a v2 event to the sync event system, but only when the
 * experimental event system flag is enabled; otherwise it is a no-op.
 */
export function run<Def extends SyncEvent.Definition>(
  def: Def,
  data: SyncEvent.Event<Def>["data"],
  options?: { publish?: boolean },
) {
  const enabled = Flag.OPENCODE_EXPERIMENTAL_EVENT_SYSTEM
  if (!enabled) return
  SyncEvent.run(def, data, options)
}
export * as EventV2 from "./event"

View File

@@ -1,261 +0,0 @@
import { produce, type WritableDraft } from "immer"
import { SessionEvent } from "./session-event"
import { SessionEntry } from "./session-entry"
/** In-memory session projection: committed entries plus entries queued while an assistant turn is still open. */
export type MemoryState = {
  entries: SessionEntry.Entry[]
  pending: SessionEntry.Entry[]
}
/**
 * Storage abstraction the event fold writes through; `Result` is whatever
 * `finish()` yields (the final state, for the in-memory adapter).
 */
export interface Adapter<Result> {
  // Newest assistant entry that has not completed, if any.
  readonly getCurrentAssistant: () => SessionEntry.Assistant | undefined
  // Replace the current (incomplete) assistant entry in place.
  readonly updateAssistant: (assistant: SessionEntry.Assistant) => void
  readonly appendEntry: (entry: SessionEntry.Entry) => void
  // Record an entry that arrived while an assistant turn was still open.
  readonly appendPending: (entry: SessionEntry.Entry) => void
  readonly finish: () => Result
}
/**
 * In-memory Adapter implementation over a mutable MemoryState. The
 * "current assistant" is the newest assistant entry whose `time.completed`
 * is unset; finish() simply hands the mutated state back.
 */
export function memory(state: MemoryState): Adapter<MemoryState> {
  // Index of the newest incomplete assistant entry, or -1 when none exists.
  function openAssistantIndex() {
    return state.entries.findLastIndex((entry) => entry.type === "assistant" && !entry.time.completed)
  }
  return {
    getCurrentAssistant() {
      const index = openAssistantIndex()
      if (index < 0) return undefined
      const candidate = state.entries[index]
      if (candidate?.type === "assistant") return candidate
      return undefined
    },
    updateAssistant(assistant) {
      const index = openAssistantIndex()
      if (index < 0) return
      const existing = state.entries[index]
      if (existing?.type !== "assistant") return
      state.entries[index] = assistant
    },
    appendEntry(entry) {
      state.entries.push(entry)
    },
    appendPending(entry) {
      state.pending.push(entry)
    },
    finish: () => state,
  }
}
/**
 * Applies a single session event to the entry list behind `adapter` and
 * returns whatever the adapter produces from `finish()`.
 *
 * Lifecycle events (`prompt`, `synthetic`, `step.started`, `compacted`)
 * append new entries; streaming events (text/reasoning/tool deltas and
 * endings) mutate the most recent in-flight assistant entry via immer
 * `produce`. Streaming events that arrive with no open assistant are
 * silently dropped.
 */
export function stepWith<Result>(adapter: Adapter<Result>, event: SessionEvent.Event): Result {
  const currentAssistant = adapter.getCurrentAssistant()
  type DraftAssistant = WritableDraft<SessionEntry.Assistant>
  type DraftTool = WritableDraft<SessionEntry.AssistantTool>
  type DraftText = WritableDraft<SessionEntry.AssistantText>
  type DraftReasoning = WritableDraft<SessionEntry.AssistantReasoning>
  // Most recent tool part in the assistant's content, optionally
  // restricted to a specific tool call.
  const latestTool = (assistant: DraftAssistant | undefined, callID?: string) =>
    assistant?.content.findLast(
      (item): item is DraftTool => item.type === "tool" && (callID === undefined || item.callID === callID),
    )
  // Most recent text part — streaming deltas always target the newest one.
  const latestText = (assistant: DraftAssistant | undefined) =>
    assistant?.content.findLast((item): item is DraftText => item.type === "text")
  // Most recent reasoning part, mirroring latestText.
  const latestReasoning = (assistant: DraftAssistant | undefined) =>
    assistant?.content.findLast((item): item is DraftReasoning => item.type === "reasoning")
  SessionEvent.Event.match(event, {
    prompt: (event) => {
      const entry = SessionEntry.User.fromEvent(event)
      // A prompt arriving while an assistant is still streaming is queued
      // rather than interleaved into the entry list.
      if (currentAssistant) {
        adapter.appendPending(entry)
        return
      }
      adapter.appendEntry(entry)
    },
    synthetic: (event) => {
      adapter.appendEntry(SessionEntry.Synthetic.fromEvent(event))
    },
    "step.started": (event) => {
      // Close out any assistant left open by a previous step before
      // appending the entry for the new one.
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            draft.time.completed = event.timestamp
          }),
        )
      }
      adapter.appendEntry(SessionEntry.Assistant.fromEvent(event))
    },
    "step.ended": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            draft.time.completed = event.timestamp
            draft.cost = event.cost
            draft.tokens = event.tokens
          }),
        )
      }
    },
    "text.started": () => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            // Open an empty text part; text.delta events fill it in.
            draft.content.push({
              type: "text",
              text: "",
            })
          }),
        )
      }
    },
    "text.delta": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            const match = latestText(draft)
            if (match) match.text += event.delta
          }),
        )
      }
    },
    "text.ended": (event) => {
      // Fix: previously a no-op, leaving only the accumulated deltas. The
      // ended event carries the authoritative final text, so overwrite with
      // it — mirrors the reasoning.ended handler below.
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            const match = latestText(draft)
            if (match) match.text = event.text
          }),
        )
      }
    },
    "tool.input.started": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            // New tool part starts in "pending" state with the raw input
            // string accumulating from tool.input.delta events.
            draft.content.push({
              type: "tool",
              callID: event.callID,
              name: event.name,
              time: {
                created: event.timestamp,
              },
              state: {
                status: "pending",
                input: "",
              },
            })
          }),
        )
      }
    },
    "tool.input.delta": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            const match = latestTool(draft, event.callID)
            // oxlint-disable-next-line no-base-to-string -- event.delta is a Schema.String (runtime string)
            if (match && match.state.status === "pending") match.state.input += event.delta
          }),
        )
      }
    },
    // Intentionally a no-op: the parsed input arrives with tool.called,
    // which replaces the pending state wholesale.
    "tool.input.ended": () => {},
    "tool.called": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            const match = latestTool(draft, event.callID)
            if (match) {
              match.time.ran = event.timestamp
              // pending -> running: swap the raw input string for the
              // parsed input record carried by the event.
              match.state = {
                status: "running",
                input: event.input,
              }
            }
          }),
        )
      }
    },
    "tool.success": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            const match = latestTool(draft, event.callID)
            if (match && match.state.status === "running") {
              match.state = {
                status: "completed",
                input: match.state.input,
                output: event.output ?? "",
                title: event.title,
                metadata: event.metadata ?? {},
                attachments: [...(event.attachments ?? [])],
              }
            }
          }),
        )
      }
    },
    "tool.error": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            const match = latestTool(draft, event.callID)
            if (match && match.state.status === "running") {
              match.state = {
                status: "error",
                error: event.error,
                input: match.state.input,
                metadata: event.metadata ?? {},
              }
            }
          }),
        )
      }
    },
    "reasoning.started": () => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            draft.content.push({
              type: "reasoning",
              text: "",
            })
          }),
        )
      }
    },
    "reasoning.delta": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            const match = latestReasoning(draft)
            if (match) match.text += event.delta
          }),
        )
      }
    },
    "reasoning.ended": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            // The ended event carries the final text; overwrite the
            // delta-accumulated value with it.
            const match = latestReasoning(draft)
            if (match) match.text = event.text
          }),
        )
      }
    },
    retried: (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            draft.retries = [...(draft.retries ?? []), SessionEntry.AssistantRetry.fromEvent(event)]
          }),
        )
      }
    },
    compacted: (event) => {
      adapter.appendEntry(SessionEntry.Compaction.fromEvent(event))
    },
  })
  return adapter.finish()
}
export function step(old: MemoryState, event: SessionEvent.Event): MemoryState {
return produce(old, (draft) => {
stepWith(memory(draft as MemoryState), event)
})
}
export * as SessionEntryStepper from "./session-entry-stepper"

View File

@@ -1,220 +0,0 @@
import { Schema } from "effect"
import { NonNegativeInt } from "@/util/schema"
import { SessionEvent } from "./session-event"
// Entry IDs reuse the event ID brand — an entry is keyed by the event that created it.
export const ID = SessionEvent.ID
export type ID = Schema.Schema.Type<typeof ID>
// Fields shared by every entry variant: id, an optional free-form metadata
// bag, and the creation timestamp.
const Base = {
  id: SessionEvent.ID,
  metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional),
  time: Schema.Struct({
    created: Schema.DateTimeUtc,
  }),
}
/** A user-authored prompt entry, materialised from a `prompt` event. */
export class User extends Schema.Class<User>("Session.Entry.User")({
  ...Base,
  // Reuse the prompt event's field schemas so entry and event stay in sync.
  text: SessionEvent.Prompt.fields.text,
  files: SessionEvent.Prompt.fields.files,
  agents: SessionEvent.Prompt.fields.agents,
  type: Schema.Literal("user"),
  time: Schema.Struct({
    created: Schema.DateTimeUtc,
  }),
}) {
  /** Builds the entry from its originating prompt event; the event timestamp becomes `time.created`. */
  static fromEvent(event: SessionEvent.Prompt) {
    return new User({
      id: event.id,
      type: "user",
      metadata: event.metadata,
      text: event.text,
      files: event.files,
      agents: event.agents,
      time: { created: event.timestamp },
    })
  }
}
/** A system-injected text entry; inherits every field of the synthetic event. */
export class Synthetic extends Schema.Class<Synthetic>("Session.Entry.Synthetic")({
  ...SessionEvent.Synthetic.fields,
  ...Base,
  type: Schema.Literal("synthetic"),
}) {
  /** Copies the event wholesale, mapping its timestamp onto `time.created`. */
  static fromEvent(event: SessionEvent.Synthetic) {
    return new Synthetic({
      ...event,
      time: { created: event.timestamp },
    })
  }
}
// Tool-call lifecycle, modelled as a tagged union on `status`:
// pending (raw input still streaming) -> running (parsed input)
// -> completed | error.
export class ToolStatePending extends Schema.Class<ToolStatePending>("Session.Entry.ToolState.Pending")({
  status: Schema.Literal("pending"),
  // Raw input accumulated from tool.input.delta events; not yet parsed.
  input: Schema.String,
}) {}
export class ToolStateRunning extends Schema.Class<ToolStateRunning>("Session.Entry.ToolState.Running")({
  status: Schema.Literal("running"),
  // Parsed input as delivered by the tool.called event.
  input: Schema.Record(Schema.String, Schema.Unknown),
  title: Schema.String.pipe(Schema.optional),
  metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional),
}) {}
export class ToolStateCompleted extends Schema.Class<ToolStateCompleted>("Session.Entry.ToolState.Completed")({
  status: Schema.Literal("completed"),
  input: Schema.Record(Schema.String, Schema.Unknown),
  output: Schema.String,
  title: Schema.String,
  metadata: Schema.Record(Schema.String, Schema.Unknown),
  attachments: SessionEvent.FileAttachment.pipe(Schema.Array, Schema.optional),
}) {}
export class ToolStateError extends Schema.Class<ToolStateError>("Session.Entry.ToolState.Error")({
  status: Schema.Literal("error"),
  input: Schema.Record(Schema.String, Schema.Unknown),
  error: Schema.String,
  metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional),
}) {}
// Discriminated union over the four lifecycle states, tagged by `status`.
export const ToolState = Schema.Union([ToolStatePending, ToolStateRunning, ToolStateCompleted, ToolStateError]).pipe(
  Schema.toTaggedUnion("status"),
)
export type ToolState = Schema.Schema.Type<typeof ToolState>
/** One tool invocation inside an assistant entry, identified by its provider call ID. */
export class AssistantTool extends Schema.Class<AssistantTool>("Session.Entry.Assistant.Tool")({
  type: Schema.Literal("tool"),
  callID: Schema.String,
  name: Schema.String,
  state: ToolState,
  time: Schema.Struct({
    created: Schema.DateTimeUtc,
    // Set when the tool actually executes (tool.called); optional until then.
    ran: Schema.DateTimeUtc.pipe(Schema.optional),
    completed: Schema.DateTimeUtc.pipe(Schema.optional),
    pruned: Schema.DateTimeUtc.pipe(Schema.optional),
  }),
}) {}
/** A streamed plain-text segment of assistant output. */
export class AssistantText extends Schema.Class<AssistantText>("Session.Entry.Assistant.Text")({
  type: Schema.Literal("text"),
  text: Schema.String,
}) {}
/** A streamed reasoning segment of assistant output. */
export class AssistantReasoning extends Schema.Class<AssistantReasoning>("Session.Entry.Assistant.Reasoning")({
  type: Schema.Literal("reasoning"),
  text: Schema.String,
}) {}
/** One retry attempt recorded against an assistant entry. */
export class AssistantRetry extends Schema.Class<AssistantRetry>("Session.Entry.Assistant.Retry")({
  attempt: NonNegativeInt,
  error: SessionEvent.RetryError,
  time: Schema.Struct({
    created: Schema.DateTimeUtc,
  }),
}) {
  static fromEvent(event: SessionEvent.Retried) {
    return new AssistantRetry({
      attempt: event.attempt,
      error: event.error,
      time: {
        created: event.timestamp,
      },
    })
  }
}
// Discriminated union of the parts that can appear in an assistant's
// `content` array, tagged by `type`.
export const AssistantContent = Schema.Union([AssistantText, AssistantReasoning, AssistantTool]).pipe(
  Schema.toTaggedUnion("type"),
)
export type AssistantContent = Schema.Schema.Type<typeof AssistantContent>
/**
 * One assistant "step": an ordered list of text/reasoning/tool parts plus
 * usage accounting. `time.completed` stays unset while the step is streaming.
 */
export class Assistant extends Schema.Class<Assistant>("Session.Entry.Assistant")({
  ...Base,
  type: Schema.Literal("assistant"),
  content: AssistantContent.pipe(Schema.Array),
  retries: AssistantRetry.pipe(Schema.Array, Schema.optional),
  // Cost and token usage are filled in by the step.ended event.
  cost: Schema.Finite.pipe(Schema.optional),
  tokens: Schema.Struct({
    input: NonNegativeInt,
    output: NonNegativeInt,
    reasoning: NonNegativeInt,
    cache: Schema.Struct({
      read: NonNegativeInt,
      write: NonNegativeInt,
    }),
  }).pipe(Schema.optional),
  error: Schema.String.pipe(Schema.optional),
  time: Schema.Struct({
    created: Schema.DateTimeUtc,
    completed: Schema.DateTimeUtc.pipe(Schema.optional),
  }),
}) {
  /** Creates an empty, in-flight assistant entry from a step.started event. */
  static fromEvent(event: SessionEvent.Step.Started) {
    return new Assistant({
      id: event.id,
      type: "assistant",
      time: {
        created: event.timestamp,
      },
      content: [],
      retries: [],
    })
  }
}
/** Marker entry recording that the session history was compacted at this point. */
export class Compaction extends Schema.Class<Compaction>("Session.Entry.Compaction")({
  ...SessionEvent.Compacted.fields,
  type: Schema.Literal("compaction"),
  ...Base,
}) {
  static fromEvent(event: SessionEvent.Compacted) {
    return new Compaction({
      ...event,
      // Override the spread event's `type` tag with the entry's own.
      type: "compaction",
      time: { created: event.timestamp },
    })
  }
}
// Discriminated union of every entry variant, tagged by `type`.
export const Entry = Schema.Union([User, Synthetic, Assistant, Compaction]).pipe(Schema.toTaggedUnion("type"))
export type Entry = Schema.Schema.Type<typeof Entry>
export type Type = Entry["type"]
/*
export interface Interface {
readonly decode: (row: typeof SessionEntryTable.$inferSelect) => Entry
readonly fromSession: (sessionID: SessionID) => Effect.Effect<Entry[], never>
}
export class Service extends Context.Service<Service, Interface>()("@opencode/SessionEntry") {}
export const layer: Layer.Layer<Service, never, never> = Layer.effect(
Service,
Effect.gen(function* () {
const decodeEntry = Schema.decodeUnknownSync(Entry)
const decode: (typeof Service.Service)["decode"] = (row) => decodeEntry({ ...row, id: row.id, type: row.type })
const fromSession = Effect.fn("SessionEntry.fromSession")(function* (sessionID: SessionID) {
return Database.use((db) =>
db
.select()
.from(SessionEntryTable)
.where(eq(SessionEntryTable.session_id, sessionID))
.orderBy(SessionEntryTable.id)
.all()
.map((row) => decode(row)),
)
})
return Service.of({
decode,
fromSession,
})
}),
)
*/
export * as SessionEntry from "./session-entry"

View File

@@ -1,128 +1,118 @@
import { Identifier } from "@/id/id"
import { NonNegativeInt, withStatics } from "@/util/schema"
import * as DateTime from "effect/DateTime"
import { SessionID } from "@/session/schema"
import { NonNegativeInt } from "@/util/schema"
import { EventV2 } from "./event"
import { FileAttachment, Prompt } from "./session-prompt"
import { Schema } from "effect"
export { FileAttachment }
import { ToolOutput } from "./tool-output"
import { ModelID, ProviderID } from "@/provider/schema"
export namespace SessionEvent {
export const ID = Schema.String.pipe(
Schema.brand("Session.Event.ID"),
withStatics((s) => ({
create: () => s.make(Identifier.create("evt", "ascending")),
})),
)
export type ID = Schema.Schema.Type<typeof ID>
type Stamp = Schema.Schema.Type<typeof Schema.DateTimeUtc>
type BaseInput = {
id?: ID
metadata?: Record<string, unknown>
timestamp?: Stamp
}
export const Source = Schema.Struct({
start: NonNegativeInt,
end: NonNegativeInt,
text: Schema.String,
}).annotate({
identifier: "session.next.event.source",
})
export type Source = Schema.Schema.Type<typeof Source>
const Base = {
id: ID,
metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional),
timestamp: Schema.DateTimeUtc,
}
const Base = {
timestamp: Schema.DateTimeUtcFromMillis,
sessionID: SessionID,
}
export class Source extends Schema.Class<Source>("Session.Event.Source")({
start: NonNegativeInt,
end: NonNegativeInt,
text: Schema.String,
}) {}
export class FileAttachment extends Schema.Class<FileAttachment>("Session.Event.FileAttachment")({
uri: Schema.String,
mime: Schema.String,
name: Schema.String.pipe(Schema.optional),
description: Schema.String.pipe(Schema.optional),
source: Source.pipe(Schema.optional),
}) {
static create(input: FileAttachment) {
return new FileAttachment({
uri: input.uri,
mime: input.mime,
name: input.name,
description: input.description,
source: input.source,
})
}
}
export class AgentAttachment extends Schema.Class<AgentAttachment>("Session.Event.AgentAttachment")({
name: Schema.String,
source: Source.pipe(Schema.optional),
}) {}
export class RetryError extends Schema.Class<RetryError>("Session.Event.Retry.Error")({
message: Schema.String,
statusCode: NonNegativeInt.pipe(Schema.optional),
isRetryable: Schema.Boolean,
responseHeaders: Schema.Record(Schema.String, Schema.String).pipe(Schema.optional),
responseBody: Schema.String.pipe(Schema.optional),
metadata: Schema.Record(Schema.String, Schema.String).pipe(Schema.optional),
}) {}
export class Prompt extends Schema.Class<Prompt>("Session.Event.Prompt")({
export const AgentSwitched = EventV2.define({
type: "session.next.agent.switched",
aggregate: "sessionID",
version: 1,
schema: {
...Base,
type: Schema.Literal("prompt"),
text: Schema.String,
files: Schema.Array(FileAttachment).pipe(Schema.optional),
agents: Schema.Array(AgentAttachment).pipe(Schema.optional),
}) {
static create(input: BaseInput & { text: string; files?: FileAttachment[]; agents?: AgentAttachment[] }) {
return new Prompt({
id: input.id ?? ID.create(),
type: "prompt",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
text: input.text,
files: input.files,
agents: input.agents,
})
}
}
agent: Schema.String,
},
})
export type AgentSwitched = Schema.Schema.Type<typeof AgentSwitched>
export class Synthetic extends Schema.Class<Synthetic>("Session.Event.Synthetic")({
export const ModelSwitched = EventV2.define({
type: "session.next.model.switched",
aggregate: "sessionID",
version: 1,
schema: {
...Base,
type: Schema.Literal("synthetic"),
text: Schema.String,
}) {
static create(input: BaseInput & { text: string }) {
return new Synthetic({
id: input.id ?? ID.create(),
type: "synthetic",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
text: input.text,
})
}
}
id: ModelID,
providerID: ProviderID,
variant: Schema.String.pipe(Schema.optional),
},
})
export type ModelSwitched = Schema.Schema.Type<typeof ModelSwitched>
export namespace Step {
export class Started extends Schema.Class<Started>("Session.Event.Step.Started")({
export const Prompted = EventV2.define({
type: "session.next.prompted",
aggregate: "sessionID",
version: 1,
schema: {
...Base,
prompt: Prompt,
},
})
export type Prompted = Schema.Schema.Type<typeof Prompted>
export const Synthetic = EventV2.define({
type: "session.next.synthetic",
aggregate: "sessionID",
schema: {
...Base,
text: Schema.String,
},
})
export type Synthetic = Schema.Schema.Type<typeof Synthetic>
export namespace Shell {
export const Started = EventV2.define({
type: "session.next.shell.started",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("step.started"),
callID: Schema.String,
command: Schema.String,
},
})
export type Started = Schema.Schema.Type<typeof Started>
export const Ended = EventV2.define({
type: "session.next.shell.ended",
aggregate: "sessionID",
schema: {
...Base,
callID: Schema.String,
output: Schema.String,
},
})
export type Ended = Schema.Schema.Type<typeof Ended>
}
export namespace Step {
export const Started = EventV2.define({
type: "session.next.step.started",
aggregate: "sessionID",
schema: {
...Base,
agent: Schema.String,
model: Schema.Struct({
id: Schema.String,
providerID: Schema.String,
variant: Schema.String.pipe(Schema.optional),
}),
}) {
static create(input: BaseInput & { model: { id: string; providerID: string; variant?: string } }) {
return new Started({
id: input.id ?? ID.create(),
type: "step.started",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
model: input.model,
})
}
}
snapshot: Schema.String.pipe(Schema.optional),
},
})
export type Started = Schema.Schema.Type<typeof Started>
export class Ended extends Schema.Class<Ended>("Session.Event.Step.Ended")({
export const Ended = EventV2.define({
type: "session.next.step.ended",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("step.ended"),
reason: Schema.String,
finish: Schema.String,
cost: Schema.Finite,
tokens: Schema.Struct({
input: NonNegativeInt,
@@ -133,177 +123,118 @@ export namespace SessionEvent {
write: NonNegativeInt,
}),
}),
}) {
static create(input: BaseInput & { reason: string; cost: number; tokens: Ended["tokens"] }) {
return new Ended({
id: input.id ?? ID.create(),
type: "step.ended",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
reason: input.reason,
cost: input.cost,
tokens: input.tokens,
})
}
}
}
snapshot: Schema.String.pipe(Schema.optional),
},
})
export type Ended = Schema.Schema.Type<typeof Ended>
}
export namespace Text {
export class Started extends Schema.Class<Started>("Session.Event.Text.Started")({
export namespace Text {
export const Started = EventV2.define({
type: "session.next.text.started",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("text.started"),
}) {
static create(input: BaseInput = {}) {
return new Started({
id: input.id ?? ID.create(),
type: "text.started",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
})
}
}
},
})
export type Started = Schema.Schema.Type<typeof Started>
export class Delta extends Schema.Class<Delta>("Session.Event.Text.Delta")({
export const Delta = EventV2.define({
type: "session.next.text.delta",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("text.delta"),
delta: Schema.String,
}) {
static create(input: BaseInput & { delta: string }) {
return new Delta({
id: input.id ?? ID.create(),
type: "text.delta",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
delta: input.delta,
})
}
}
},
})
export type Delta = Schema.Schema.Type<typeof Delta>
export class Ended extends Schema.Class<Ended>("Session.Event.Text.Ended")({
export const Ended = EventV2.define({
type: "session.next.text.ended",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("text.ended"),
text: Schema.String,
}) {
static create(input: BaseInput & { text: string }) {
return new Ended({
id: input.id ?? ID.create(),
type: "text.ended",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
text: input.text,
})
}
}
}
},
})
export type Ended = Schema.Schema.Type<typeof Ended>
}
export namespace Reasoning {
export class Started extends Schema.Class<Started>("Session.Event.Reasoning.Started")({
export namespace Reasoning {
export const Started = EventV2.define({
type: "session.next.reasoning.started",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("reasoning.started"),
}) {
static create(input: BaseInput = {}) {
return new Started({
id: input.id ?? ID.create(),
type: "reasoning.started",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
})
}
}
reasoningID: Schema.String,
},
})
export type Started = Schema.Schema.Type<typeof Started>
export class Delta extends Schema.Class<Delta>("Session.Event.Reasoning.Delta")({
export const Delta = EventV2.define({
type: "session.next.reasoning.delta",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("reasoning.delta"),
reasoningID: Schema.String,
delta: Schema.String,
}) {
static create(input: BaseInput & { delta: string }) {
return new Delta({
id: input.id ?? ID.create(),
type: "reasoning.delta",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
delta: input.delta,
})
}
}
},
})
export type Delta = Schema.Schema.Type<typeof Delta>
export class Ended extends Schema.Class<Ended>("Session.Event.Reasoning.Ended")({
export const Ended = EventV2.define({
type: "session.next.reasoning.ended",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("reasoning.ended"),
reasoningID: Schema.String,
text: Schema.String,
}) {
static create(input: BaseInput & { text: string }) {
return new Ended({
id: input.id ?? ID.create(),
type: "reasoning.ended",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
text: input.text,
})
}
}
}
},
})
export type Ended = Schema.Schema.Type<typeof Ended>
}
export namespace Tool {
export namespace Input {
export class Started extends Schema.Class<Started>("Session.Event.Tool.Input.Started")({
export namespace Tool {
export namespace Input {
export const Started = EventV2.define({
type: "session.next.tool.input.started",
aggregate: "sessionID",
schema: {
...Base,
callID: Schema.String,
name: Schema.String,
type: Schema.Literal("tool.input.started"),
}) {
static create(input: BaseInput & { callID: string; name: string }) {
return new Started({
id: input.id ?? ID.create(),
type: "tool.input.started",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
callID: input.callID,
name: input.name,
})
}
}
},
})
export type Started = Schema.Schema.Type<typeof Started>
export class Delta extends Schema.Class<Delta>("Session.Event.Tool.Input.Delta")({
export const Delta = EventV2.define({
type: "session.next.tool.input.delta",
aggregate: "sessionID",
schema: {
...Base,
callID: Schema.String,
type: Schema.Literal("tool.input.delta"),
delta: Schema.String,
}) {
static create(input: BaseInput & { callID: string; delta: string }) {
return new Delta({
id: input.id ?? ID.create(),
type: "tool.input.delta",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
callID: input.callID,
delta: input.delta,
})
}
}
},
})
export type Delta = Schema.Schema.Type<typeof Delta>
export class Ended extends Schema.Class<Ended>("Session.Event.Tool.Input.Ended")({
export const Ended = EventV2.define({
type: "session.next.tool.input.ended",
aggregate: "sessionID",
schema: {
...Base,
callID: Schema.String,
type: Schema.Literal("tool.input.ended"),
text: Schema.String,
}) {
static create(input: BaseInput & { callID: string; text: string }) {
return new Ended({
id: input.id ?? ID.create(),
type: "tool.input.ended",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
callID: input.callID,
text: input.text,
})
}
}
}
},
})
export type Ended = Schema.Schema.Type<typeof Ended>
}
export class Called extends Schema.Class<Called>("Session.Event.Tool.Called")({
export const Called = EventV2.define({
type: "session.next.tool.called",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("tool.called"),
callID: Schema.String,
tool: Schema.String,
input: Schema.Record(Schema.String, Schema.Unknown),
@@ -311,148 +242,155 @@ export namespace SessionEvent {
executed: Schema.Boolean,
metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional),
}),
}) {
static create(
input: BaseInput & {
callID: string
tool: string
input: Record<string, unknown>
provider: Called["provider"]
},
) {
return new Called({
id: input.id ?? ID.create(),
type: "tool.called",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
callID: input.callID,
tool: input.tool,
input: input.input,
provider: input.provider,
})
}
}
},
})
export type Called = Schema.Schema.Type<typeof Called>
export class Success extends Schema.Class<Success>("Session.Event.Tool.Success")({
export const Progress = EventV2.define({
type: "session.next.tool.progress",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("tool.success"),
callID: Schema.String,
title: Schema.String,
output: Schema.String.pipe(Schema.optional),
attachments: Schema.Array(FileAttachment).pipe(Schema.optional),
structured: ToolOutput.Structured,
content: Schema.Array(ToolOutput.Content),
},
})
export type Progress = Schema.Schema.Type<typeof Progress>
export const Success = EventV2.define({
type: "session.next.tool.success",
aggregate: "sessionID",
schema: {
...Base,
callID: Schema.String,
structured: ToolOutput.Structured,
content: Schema.Array(ToolOutput.Content),
provider: Schema.Struct({
executed: Schema.Boolean,
metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional),
}),
}) {
static create(
input: BaseInput & {
callID: string
title: string
output?: string
attachments?: FileAttachment[]
provider: Success["provider"]
},
) {
return new Success({
id: input.id ?? ID.create(),
type: "tool.success",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
callID: input.callID,
title: input.title,
output: input.output,
attachments: input.attachments,
provider: input.provider,
})
}
}
},
})
export type Success = Schema.Schema.Type<typeof Success>
export class Error extends Schema.Class<Error>("Session.Event.Tool.Error")({
export const Error = EventV2.define({
type: "session.next.tool.error",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("tool.error"),
callID: Schema.String,
error: Schema.String,
error: Schema.Struct({
type: Schema.String,
message: Schema.String,
}),
provider: Schema.Struct({
executed: Schema.Boolean,
metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional),
}),
}) {
static create(input: BaseInput & { callID: string; error: string; provider: Error["provider"] }) {
return new Error({
id: input.id ?? ID.create(),
type: "tool.error",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
callID: input.callID,
error: input.error,
provider: input.provider,
})
}
}
}
},
})
export type Error = Schema.Schema.Type<typeof Error>
}
export class Retried extends Schema.Class<Retried>("Session.Event.Retried")({
export const RetryError = Schema.Struct({
message: Schema.String,
statusCode: NonNegativeInt.pipe(Schema.optional),
isRetryable: Schema.Boolean,
responseHeaders: Schema.Record(Schema.String, Schema.String).pipe(Schema.optional),
responseBody: Schema.String.pipe(Schema.optional),
metadata: Schema.Record(Schema.String, Schema.String).pipe(Schema.optional),
}).annotate({
identifier: "session.next.retry_error",
})
export type RetryError = Schema.Schema.Type<typeof RetryError>
export const Retried = EventV2.define({
type: "session.next.retried",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("retried"),
attempt: NonNegativeInt,
error: RetryError,
}) {
static create(input: BaseInput & { attempt: number; error: RetryError }) {
return new Retried({
id: input.id ?? ID.create(),
type: "retried",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
attempt: input.attempt,
error: input.error,
})
}
}
},
})
export type Retried = Schema.Schema.Type<typeof Retried>
export class Compacted extends Schema.Class<Compacted>("Session.Event.Compated")({
...Base,
type: Schema.Literal("compacted"),
auto: Schema.Boolean,
overflow: Schema.Boolean.pipe(Schema.optional),
}) {
static create(input: BaseInput & { auto: boolean; overflow?: boolean }) {
return new Compacted({
id: input.id ?? ID.create(),
type: "compacted",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
auto: input.auto,
overflow: input.overflow,
})
}
}
export const Event = Schema.Union(
[
Prompt,
Synthetic,
Step.Started,
Step.Ended,
Text.Started,
Text.Delta,
Text.Ended,
Tool.Input.Started,
Tool.Input.Delta,
Tool.Input.Ended,
Tool.Called,
Tool.Success,
Tool.Error,
Reasoning.Started,
Reasoning.Delta,
Reasoning.Ended,
Retried,
Compacted,
],
{
mode: "oneOf",
export namespace Compaction {
export const Started = EventV2.define({
type: "session.next.compaction.started",
aggregate: "sessionID",
schema: {
...Base,
reason: Schema.Union([Schema.Literal("auto"), Schema.Literal("manual")]),
},
).pipe(Schema.toTaggedUnion("type"))
export type Event = Schema.Schema.Type<typeof Event>
export type Type = Event["type"]
})
export type Started = Schema.Schema.Type<typeof Started>
export const Delta = EventV2.define({
type: "session.next.compaction.delta",
aggregate: "sessionID",
schema: {
...Base,
text: Schema.String,
},
})
export const Ended = EventV2.define({
type: "session.next.compaction.ended",
aggregate: "sessionID",
schema: {
...Base,
text: Schema.String,
include: Schema.String.pipe(Schema.optional),
},
})
export type Ended = Schema.Schema.Type<typeof Ended>
}
export const All = Schema.Union(
[
AgentSwitched,
ModelSwitched,
Prompted,
Synthetic,
Shell.Started,
Shell.Ended,
Step.Started,
Step.Ended,
Text.Started,
Text.Delta,
Text.Ended,
Tool.Input.Started,
Tool.Input.Delta,
Tool.Input.Ended,
Tool.Called,
Tool.Progress,
Tool.Success,
Tool.Error,
Reasoning.Started,
Reasoning.Delta,
Reasoning.Ended,
Retried,
Compaction.Started,
Compaction.Delta,
Compaction.Ended,
],
{
mode: "oneOf",
},
).pipe(Schema.toTaggedUnion("type"))
// user
// assistant
// assistant
// assistant
// user
// compaction marker
// -> text
// assistant
export type Event = Schema.Schema.Type<typeof All>
export type Type = Event["type"]
export * as SessionEvent from "./session-event"

View File

@@ -0,0 +1,411 @@
import { produce, type WritableDraft } from "immer"
import { SessionEvent } from "./session-event"
import { SessionMessage } from "./session-message"
// Plain in-memory projection state: the ordered message list for one session.
export type MemoryState = {
  messages: SessionMessage.Message[]
}
/**
 * Storage abstraction the stepper writes through. `Result` is whatever
 * `finish()` yields — the full state for the in-memory adapter.
 * The `getCurrent*` accessors return the message a streaming event should
 * mutate, or undefined when none is open.
 */
export interface Adapter<Result> {
  readonly getCurrentAssistant: () => SessionMessage.Assistant | undefined
  readonly getCurrentCompaction: () => SessionMessage.Compaction | undefined
  readonly getCurrentShell: (callID: string) => SessionMessage.Shell | undefined
  readonly updateAssistant: (assistant: SessionMessage.Assistant) => void
  readonly updateCompaction: (compaction: SessionMessage.Compaction) => void
  readonly updateShell: (shell: SessionMessage.Shell) => void
  readonly appendMessage: (message: SessionMessage.Message) => void
  readonly finish: () => Result
}
/**
 * Adapter that reads and mutates a `MemoryState` in place; `finish()`
 * hands back the (mutated) state object itself.
 */
export function memory(state: MemoryState): Adapter<MemoryState> {
  // Index of the newest assistant message that has not finished streaming.
  const openAssistantAt = () =>
    state.messages.findLastIndex((m) => m.type === "assistant" && !m.time.completed)
  // Index of the newest compaction message.
  const compactionAt = () => state.messages.findLastIndex((m) => m.type === "compaction")
  // Index of the newest shell message belonging to the given tool call.
  const shellAt = (callID: string) =>
    state.messages.findLastIndex((m) => m.type === "shell" && m.callID === callID)
  return {
    getCurrentAssistant() {
      const at = openAssistantAt()
      if (at < 0) return undefined
      const found = state.messages[at]
      if (found?.type !== "assistant") return undefined
      return found
    },
    getCurrentCompaction() {
      const at = compactionAt()
      if (at < 0) return undefined
      const found = state.messages[at]
      if (found?.type !== "compaction") return undefined
      return found
    },
    getCurrentShell(callID) {
      const at = shellAt(callID)
      if (at < 0) return undefined
      const found = state.messages[at]
      if (found?.type !== "shell") return undefined
      return found
    },
    updateAssistant(assistant) {
      const at = openAssistantAt()
      if (at < 0) return
      // Re-check the slot's type before overwriting it.
      if (state.messages[at]?.type !== "assistant") return
      state.messages[at] = assistant
    },
    updateCompaction(compaction) {
      const at = compactionAt()
      if (at < 0) return
      if (state.messages[at]?.type !== "compaction") return
      state.messages[at] = compaction
    },
    updateShell(shell) {
      const at = shellAt(shell.callID)
      if (at < 0) return
      if (state.messages[at]?.type !== "shell") return
      state.messages[at] = shell
    },
    appendMessage(message) {
      state.messages.push(message)
    },
    finish: () => state,
  }
}
export function update<Result>(adapter: Adapter<Result>, event: SessionEvent.Event): Result {
const currentAssistant = adapter.getCurrentAssistant()
type DraftAssistant = WritableDraft<SessionMessage.Assistant>
type DraftTool = WritableDraft<SessionMessage.AssistantTool>
type DraftText = WritableDraft<SessionMessage.AssistantText>
type DraftReasoning = WritableDraft<SessionMessage.AssistantReasoning>
const latestTool = (assistant: DraftAssistant | undefined, callID?: string) =>
assistant?.content.findLast(
(item): item is DraftTool => item.type === "tool" && (callID === undefined || item.id === callID),
)
const latestText = (assistant: DraftAssistant | undefined) =>
assistant?.content.findLast((item): item is DraftText => item.type === "text")
const latestReasoning = (assistant: DraftAssistant | undefined, reasoningID: string) =>
assistant?.content.findLast(
(item): item is DraftReasoning => item.type === "reasoning" && item.id === reasoningID,
)
// Dispatch the incoming session event into message mutations. Each handler either
// appends a brand-new message or rebuilds the latest relevant message (shell /
// assistant / compaction) immutably via `produce`. Note each handler's `event`
// parameter shadows the outer `event` being matched.
SessionEvent.All.match(event, {
  // Marker message: the active agent changed.
  "session.next.agent.switched": (event) => {
    adapter.appendMessage(
      new SessionMessage.AgentSwitched({
        id: event.id,
        type: "agent-switched",
        metadata: event.metadata,
        agent: event.data.agent,
        time: { created: event.data.timestamp },
      }),
    )
  },
  // Marker message: the active model (id/provider/variant) changed.
  "session.next.model.switched": (event) => {
    adapter.appendMessage(
      new SessionMessage.ModelSwitched({
        id: event.id,
        type: "model-switched",
        metadata: event.metadata,
        model: {
          id: event.data.id,
          providerID: event.data.providerID,
          variant: event.data.variant,
        },
        time: { created: event.data.timestamp },
      }),
    )
  },
  // A user prompt becomes a User message carrying its text/files/agent mentions.
  "session.next.prompted": (event) => {
    adapter.appendMessage(
      new SessionMessage.User({
        id: event.id,
        type: "user",
        metadata: event.metadata,
        text: event.data.prompt.text,
        files: event.data.prompt.files,
        agents: event.data.prompt.agents,
        time: { created: event.data.timestamp },
      }),
    )
  },
  // System-generated text injected into the session.
  "session.next.synthetic": (event) => {
    adapter.appendMessage(
      new SessionMessage.Synthetic({
        sessionID: event.data.sessionID,
        text: event.data.text,
        id: event.id,
        type: "synthetic",
        time: { created: event.data.timestamp },
      }),
    )
  },
  // A shell command started: append a Shell message with an empty output buffer.
  "session.next.shell.started": (event) => {
    adapter.appendMessage(
      new SessionMessage.Shell({
        id: event.id,
        type: "shell",
        metadata: event.metadata,
        callID: event.data.callID,
        command: event.data.command,
        output: "",
        time: { created: event.data.timestamp },
      }),
    )
  },
  // Shell finished: locate it by callID and record the full output + completion time.
  // Silently dropped if no matching shell message exists.
  "session.next.shell.ended": (event) => {
    const currentShell = adapter.getCurrentShell(event.data.callID)
    if (currentShell) {
      adapter.updateShell(
        produce(currentShell, (draft) => {
          draft.output = event.data.output
          draft.time.completed = event.data.timestamp
        }),
      )
    }
  },
  // New assistant step: close out the previous assistant message (stamp completed),
  // then append a fresh Assistant message with empty content.
  "session.next.step.started": (event) => {
    if (currentAssistant) {
      adapter.updateAssistant(
        produce(currentAssistant, (draft) => {
          draft.time.completed = event.data.timestamp
        }),
      )
    }
    adapter.appendMessage(
      new SessionMessage.Assistant({
        id: event.id,
        type: "assistant",
        agent: event.data.agent,
        model: event.data.model,
        time: { created: event.data.timestamp },
        content: [],
        snapshot: event.data.snapshot ? { start: event.data.snapshot } : undefined,
      }),
    )
  },
  // Step finished: record completion time, finish reason, cost, token usage, and
  // (if present) the end snapshot alongside any existing start snapshot.
  "session.next.step.ended": (event) => {
    if (currentAssistant) {
      adapter.updateAssistant(
        produce(currentAssistant, (draft) => {
          draft.time.completed = event.data.timestamp
          draft.finish = event.data.finish
          draft.cost = event.data.cost
          draft.tokens = event.data.tokens
          if (event.data.snapshot) draft.snapshot = { ...draft.snapshot, end: event.data.snapshot }
        }),
      )
    }
  },
  // Text streaming: push an empty text part, append deltas to the latest text part,
  // then replace with the authoritative full text on end.
  "session.next.text.started": () => {
    if (currentAssistant) {
      adapter.updateAssistant(
        produce(currentAssistant, (draft) => {
          draft.content.push({
            type: "text",
            text: "",
          })
        }),
      )
    }
  },
  "session.next.text.delta": (event) => {
    if (currentAssistant) {
      adapter.updateAssistant(
        produce(currentAssistant, (draft) => {
          const match = latestText(draft)
          if (match) match.text += event.data.delta
        }),
      )
    }
  },
  "session.next.text.ended": (event) => {
    if (currentAssistant) {
      adapter.updateAssistant(
        produce(currentAssistant, (draft) => {
          const match = latestText(draft)
          if (match) match.text = event.data.text
        }),
      )
    }
  },
  // Tool-call input streaming: create a pending tool part keyed by callID with an
  // empty raw-input buffer.
  "session.next.tool.input.started": (event) => {
    if (currentAssistant) {
      adapter.updateAssistant(
        produce(currentAssistant, (draft) => {
          draft.content.push({
            type: "tool",
            id: event.data.callID,
            name: event.data.name,
            time: {
              created: event.data.timestamp,
            },
            state: {
              status: "pending",
              input: "",
            },
          })
        }),
      )
    }
  },
  // Accumulate raw input deltas while the tool part is still pending.
  "session.next.tool.input.delta": (event) => {
    if (currentAssistant) {
      adapter.updateAssistant(
        produce(currentAssistant, (draft) => {
          const match = latestTool(draft, event.data.callID)
          // oxlint-disable-next-line no-base-to-string -- event.delta is a Schema.String (runtime string)
          if (match && match.state.status === "pending") match.state.input += event.data.delta
        }),
      )
    }
  },
  // No-op: the final parsed input arrives with "session.next.tool.called".
  "session.next.tool.input.ended": () => {},
  // Transition pending -> running with the parsed input; structured/content start empty.
  "session.next.tool.called": (event) => {
    if (currentAssistant) {
      adapter.updateAssistant(
        produce(currentAssistant, (draft) => {
          const match = latestTool(draft, event.data.callID)
          if (match) {
            match.provider = event.data.provider
            match.time.ran = event.data.timestamp
            match.state = {
              status: "running",
              input: event.data.input,
              structured: {},
              content: [],
            }
          }
        }),
      )
    }
  },
  // Progress updates replace structured/content wholesale while running.
  "session.next.tool.progress": (event) => {
    if (currentAssistant) {
      adapter.updateAssistant(
        produce(currentAssistant, (draft) => {
          const match = latestTool(draft, event.data.callID)
          if (match && match.state.status === "running") {
            match.state.structured = event.data.structured
            match.state.content = [...event.data.content]
          }
        }),
      )
    }
  },
  // running -> completed; input is carried over, outputs come from the event.
  // Note: only applies if the part is currently "running" (success on a pending
  // part is dropped — presumably intentional; confirm against the event producer).
  "session.next.tool.success": (event) => {
    if (currentAssistant) {
      adapter.updateAssistant(
        produce(currentAssistant, (draft) => {
          const match = latestTool(draft, event.data.callID)
          if (match && match.state.status === "running") {
            match.provider = event.data.provider
            match.time.completed = event.data.timestamp
            match.state = {
              status: "completed",
              input: match.state.input,
              structured: event.data.structured,
              content: [...event.data.content],
            }
          }
        }),
      )
    }
  },
  // running -> error; input/structured/content accumulated so far are preserved.
  "session.next.tool.error": (event) => {
    if (currentAssistant) {
      adapter.updateAssistant(
        produce(currentAssistant, (draft) => {
          const match = latestTool(draft, event.data.callID)
          if (match && match.state.status === "running") {
            match.provider = event.data.provider
            match.time.completed = event.data.timestamp
            match.state = {
              status: "error",
              error: event.data.error,
              input: match.state.input,
              structured: match.state.structured,
              content: match.state.content,
            }
          }
        }),
      )
    }
  },
  // Reasoning streaming mirrors text streaming, but parts are keyed by reasoningID.
  "session.next.reasoning.started": (event) => {
    if (currentAssistant) {
      adapter.updateAssistant(
        produce(currentAssistant, (draft) => {
          draft.content.push({
            type: "reasoning",
            id: event.data.reasoningID,
            text: "",
          })
        }),
      )
    }
  },
  "session.next.reasoning.delta": (event) => {
    if (currentAssistant) {
      adapter.updateAssistant(
        produce(currentAssistant, (draft) => {
          const match = latestReasoning(draft, event.data.reasoningID)
          if (match) match.text += event.data.delta
        }),
      )
    }
  },
  "session.next.reasoning.ended": (event) => {
    if (currentAssistant) {
      adapter.updateAssistant(
        produce(currentAssistant, (draft) => {
          const match = latestReasoning(draft, event.data.reasoningID)
          if (match) match.text = event.data.text
        }),
      )
    }
  },
  // Retries produce no message mutation.
  "session.next.retried": () => {},
  // Compaction: append a Compaction message with an empty summary, stream summary
  // deltas into it, then finalize summary + include on end.
  "session.next.compaction.started": (event) => {
    adapter.appendMessage(
      new SessionMessage.Compaction({
        id: event.id,
        type: "compaction",
        metadata: event.metadata,
        reason: event.data.reason,
        summary: "",
        time: { created: event.data.timestamp },
      }),
    )
  },
  "session.next.compaction.delta": (event) => {
    const currentCompaction = adapter.getCurrentCompaction()
    if (currentCompaction) {
      adapter.updateCompaction(
        produce(currentCompaction, (draft) => {
          draft.summary += event.data.text
        }),
      )
    }
  },
  "session.next.compaction.ended": (event) => {
    const currentCompaction = adapter.getCurrentCompaction()
    if (currentCompaction) {
      adapter.updateCompaction(
        produce(currentCompaction, (draft) => {
          draft.summary = event.data.text
          draft.include = event.data.include
        }),
      )
    }
  },
})
// Hand the accumulated message mutations back to the caller.
return adapter.finish()
}
export * as SessionMessageUpdater from "./session-message-updater"

View File

@@ -0,0 +1,177 @@
import { Schema } from "effect"
import { Prompt } from "./session-prompt"
import { SessionEvent } from "./session-event"
import { EventV2 } from "./event"
import { ToolOutput } from "./tool-output"
// Message ids reuse the v2 event id space: a message's id is the id of the event
// that created it.
export const ID = EventV2.ID
export type ID = Schema.Schema.Type<typeof ID>
// Fields shared by every message variant: id, optional free-form metadata, and
// the creation timestamp (encoded as epoch milliseconds).
const Base = {
  id: ID,
  metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional),
  time: Schema.Struct({
    created: Schema.DateTimeUtcFromMillis,
  }),
}
/**
 * Marker message: the session's active agent changed. The agent field schema is
 * borrowed from the corresponding session event so message and event stay in sync.
 */
export class AgentSwitched extends Schema.Class<AgentSwitched>("Session.Message.AgentSwitched")({
  ...Base,
  type: Schema.Literal("agent-switched"),
  agent: SessionEvent.AgentSwitched.fields.data.fields.agent,
}) {}
/**
 * Marker message: the session's model changed. id/providerID/variant schemas are
 * borrowed from the ModelSwitched event.
 */
export class ModelSwitched extends Schema.Class<ModelSwitched>("Session.Message.ModelSwitched")({
  ...Base,
  type: Schema.Literal("model-switched"),
  model: Schema.Struct({
    id: SessionEvent.ModelSwitched.fields.data.fields.id,
    providerID: SessionEvent.ModelSwitched.fields.data.fields.providerID,
    variant: SessionEvent.ModelSwitched.fields.data.fields.variant,
  }),
}) {}
/**
 * A user prompt message. text/files/agents reuse the field schemas from the
 * Prompt class so the message stays structurally in sync with the prompt input.
 * The `time` field comes from Base — the previous explicit re-declaration was a
 * byte-identical duplicate of Base.time and has been removed.
 */
export class User extends Schema.Class<User>("Session.Message.User")({
  ...Base,
  text: Prompt.fields.text,
  files: Prompt.fields.files,
  agents: Prompt.fields.agents,
  type: Schema.Literal("user"),
}) {}
/** System-generated text injected into the session (not typed by the user). */
export class Synthetic extends Schema.Class<Synthetic>("Session.Message.Synthetic")({
  ...Base,
  sessionID: SessionEvent.Synthetic.fields.data.fields.sessionID,
  text: SessionEvent.Synthetic.fields.data.fields.text,
  type: Schema.Literal("synthetic"),
}) {}
/**
 * A shell command run in the session. `output` starts empty and is filled in when
 * the command ends. `time` widens Base's struct with an optional completion stamp.
 */
export class Shell extends Schema.Class<Shell>("Session.Message.Shell")({
  ...Base,
  type: Schema.Literal("shell"),
  callID: SessionEvent.Shell.Started.fields.data.fields.callID,
  command: SessionEvent.Shell.Started.fields.data.fields.command,
  output: Schema.String,
  time: Schema.Struct({
    created: Schema.DateTimeUtcFromMillis,
    completed: Schema.DateTimeUtcFromMillis.pipe(Schema.optional),
  }),
}) {}
/**
 * Tool-call lifecycle states. While pending, `input` is the raw streamed string;
 * from running onward it is the parsed argument record.
 */
export class ToolStatePending extends Schema.Class<ToolStatePending>("Session.Message.ToolState.Pending")({
  status: Schema.Literal("pending"),
  input: Schema.String,
}) {}
export class ToolStateRunning extends Schema.Class<ToolStateRunning>("Session.Message.ToolState.Running")({
  status: Schema.Literal("running"),
  input: Schema.Record(Schema.String, Schema.Unknown),
  structured: ToolOutput.Structured,
  content: ToolOutput.Content.pipe(Schema.Array),
}) {}
export class ToolStateCompleted extends Schema.Class<ToolStateCompleted>("Session.Message.ToolState.Completed")({
  status: Schema.Literal("completed"),
  input: Schema.Record(Schema.String, Schema.Unknown),
  // NOTE(review): attachments exists only on the completed state (not error) and is
  // not populated by the event updater visible in this file — confirm who sets it.
  attachments: SessionEvent.FileAttachment.pipe(Schema.Array, Schema.optional),
  content: ToolOutput.Content.pipe(Schema.Array),
  structured: ToolOutput.Structured,
}) {}
export class ToolStateError extends Schema.Class<ToolStateError>("Session.Message.ToolState.Error")({
  status: Schema.Literal("error"),
  input: Schema.Record(Schema.String, Schema.Unknown),
  content: ToolOutput.Content.pipe(Schema.Array),
  structured: ToolOutput.Structured,
  error: Schema.Struct({
    type: Schema.String,
    message: Schema.String,
  }),
}) {}
// Tagged union over the four states, discriminated on `status`.
export const ToolState = Schema.Union([ToolStatePending, ToolStateRunning, ToolStateCompleted, ToolStateError]).pipe(
  Schema.toTaggedUnion("status"),
)
export type ToolState = Schema.Schema.Type<typeof ToolState>
/**
 * A tool-call part on an assistant message. `id` is the tool call id; `time`
 * tracks creation, execution start (`ran`), completion, and pruning.
 */
export class AssistantTool extends Schema.Class<AssistantTool>("Session.Message.Assistant.Tool")({
  type: Schema.Literal("tool"),
  id: Schema.String,
  name: Schema.String,
  // Provider-side execution info: whether the provider executed the tool itself,
  // plus optional provider metadata.
  provider: Schema.Struct({
    executed: Schema.Boolean,
    metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional),
  }).pipe(Schema.optional),
  state: ToolState,
  time: Schema.Struct({
    created: Schema.DateTimeUtcFromMillis,
    ran: Schema.DateTimeUtcFromMillis.pipe(Schema.optional),
    completed: Schema.DateTimeUtcFromMillis.pipe(Schema.optional),
    pruned: Schema.DateTimeUtcFromMillis.pipe(Schema.optional),
  }),
}) {}
/** A plain text part on an assistant message. */
export class AssistantText extends Schema.Class<AssistantText>("Session.Message.Assistant.Text")({
  type: Schema.Literal("text"),
  text: Schema.String,
}) {}
/** A reasoning part; `id` is the reasoningID the streaming events are keyed by. */
export class AssistantReasoning extends Schema.Class<AssistantReasoning>("Session.Message.Assistant.Reasoning")({
  type: Schema.Literal("reasoning"),
  id: Schema.String,
  text: Schema.String,
}) {}
// Tagged union of assistant content parts, discriminated on `type`.
export const AssistantContent = Schema.Union([AssistantText, AssistantReasoning, AssistantTool]).pipe(
  Schema.toTaggedUnion("type"),
)
export type AssistantContent = Schema.Schema.Type<typeof AssistantContent>
/**
 * An assistant step message. Content parts are streamed in (see the event
 * updater); cost/tokens/finish are stamped when the step ends. The model schema
 * is borrowed from the Step.Started event.
 */
export class Assistant extends Schema.Class<Assistant>("Session.Message.Assistant")({
  ...Base,
  type: Schema.Literal("assistant"),
  agent: Schema.String,
  model: SessionEvent.Step.Started.fields.data.fields.model,
  content: AssistantContent.pipe(Schema.Array),
  // Snapshot identifiers taken at step start/end — presumably workspace snapshots;
  // confirm against the snapshot producer.
  snapshot: Schema.Struct({
    start: Schema.String.pipe(Schema.optional),
    end: Schema.String.pipe(Schema.optional),
  }).pipe(Schema.optional),
  // Finish reason reported by the step.ended event.
  finish: Schema.String.pipe(Schema.optional),
  cost: Schema.Number.pipe(Schema.optional),
  // Token usage including cache read/write counts.
  tokens: Schema.Struct({
    input: Schema.Number,
    output: Schema.Number,
    reasoning: Schema.Number,
    cache: Schema.Struct({
      read: Schema.Number,
      write: Schema.Number,
    }),
  }).pipe(Schema.optional),
  error: Schema.String.pipe(Schema.optional),
  // Widens Base's time with an optional completion timestamp.
  time: Schema.Struct({
    created: Schema.DateTimeUtcFromMillis,
    completed: Schema.DateTimeUtcFromMillis.pipe(Schema.optional),
  }),
}) {}
/**
 * A context-compaction message. `reason` comes from the compaction event;
 * `summary` is streamed in via deltas and finalized (with `include`) on end.
 */
export class Compaction extends Schema.Class<Compaction>("Session.Message.Compaction")({
  type: Schema.Literal("compaction"),
  reason: SessionEvent.Compaction.Started.fields.data.fields.reason,
  summary: Schema.String,
  include: Schema.String.pipe(Schema.optional),
  // NOTE(review): Base is spread last here, unlike every other class in this file.
  // There is no key collision so behavior is identical, but consider moving it
  // first for consistency.
  ...Base,
}) {}
// Tagged union over every message variant, discriminated on `type`.
export const Message = Schema.Union([AgentSwitched, ModelSwitched, User, Synthetic, Shell, Assistant, Compaction])
  .pipe(Schema.toTaggedUnion("type"))
  .annotate({ identifier: "Session.Message" })
export type Message = Schema.Schema.Type<typeof Message>
// Convenience alias for the discriminant values.
export type Type = Message["type"]
export * as SessionMessage from "./session-message"

View File

@@ -0,0 +1,36 @@
import * as Schema from "effect/Schema"
/**
 * Location of an attachment inside the prompt text — presumably character
 * offsets plus the matched text; confirm against the prompt parser.
 */
export class Source extends Schema.Class<Source>("Prompt.Source")({
  start: Schema.Number,
  end: Schema.Number,
  text: Schema.String,
}) {}
/**
 * A file attached to a prompt, addressed by URI with a MIME type, optional
 * display name/description, and an optional location in the prompt text.
 */
export class FileAttachment extends Schema.Class<FileAttachment>("Prompt.FileAttachment")({
  uri: Schema.String,
  mime: Schema.String,
  name: Schema.String.pipe(Schema.optional),
  description: Schema.String.pipe(Schema.optional),
  source: Source.pipe(Schema.optional),
}) {
  /** Build a fresh FileAttachment carrying over exactly the five schema fields. */
  static create(input: FileAttachment) {
    const { uri, mime, name, description, source } = input
    return new FileAttachment({ uri, mime, name, description, source })
  }
}
/** An agent mention attached to the prompt; `source` locates the mention in the text. */
export class AgentAttachment extends Schema.Class<AgentAttachment>("Prompt.AgentAttachment")({
  name: Schema.String,
  source: Source.pipe(Schema.optional),
}) {}
/** A user prompt: text plus optional file and agent attachments. */
export class Prompt extends Schema.Class<Prompt>("Prompt")({
  text: Schema.String,
  files: Schema.Array(FileAttachment).pipe(Schema.optional),
  agents: Schema.Array(AgentAttachment).pipe(Schema.optional),
}) {}

View File

@@ -1,69 +1,241 @@
import { Context, Layer, Schema, Effect } from "effect"
import { SessionEntry } from "./session-entry"
import { Struct } from "effect"
import { Session } from "@/session/session"
import { SessionMessageTable, SessionTable } from "@/session/session.sql"
import { SessionID } from "@/session/schema"
import { WorkspaceID } from "@/control-plane/schema"
import { and, asc, desc, eq, gt, gte, isNull, like, lt, or, type SQL } from "@/storage/db"
import * as Database from "@/storage/db"
import { Context, DateTime, Effect, Layer, Schema } from "effect"
import { SessionMessage } from "./session-message"
import type { Prompt } from "./session-prompt"
import { EventV2 } from "./event"
import { ProjectID } from "@/project/schema"
import { ModelID, ProviderID } from "@/provider/schema"
import { SessionEvent } from "./session-event"
export const ID = SessionID
export const Delivery = Schema.Union([Schema.Literal("immediate"), Schema.Literal("deferred")]).annotate({
identifier: "Session.Delivery",
})
export type Delivery = Schema.Schema.Type<typeof Delivery>
export type ID = Schema.Schema.Type<typeof ID>
export class PromptInput extends Schema.Class<PromptInput>("Session.PromptInput")({
...Struct.omit(SessionEntry.User.fields, ["time", "type"]),
id: Schema.optionalKey(SessionEntry.ID),
sessionID: ID,
}) {}
export class CreateInput extends Schema.Class<CreateInput>("Session.CreateInput")({
id: Schema.optionalKey(ID),
}) {}
export const DefaultDelivery = "immediate" satisfies Delivery
export class Info extends Schema.Class<Info>("Session.Info")({
id: ID,
id: SessionID,
parentID: SessionID.pipe(Schema.optional),
projectID: ProjectID,
workspaceID: WorkspaceID.pipe(Schema.optional),
path: Schema.String.pipe(Schema.optional),
agent: Schema.String.pipe(Schema.optional),
model: Schema.Struct({
id: Schema.String,
providerID: Schema.String,
modelID: Schema.String,
id: ModelID,
providerID: ProviderID,
variant: Schema.String.pipe(Schema.optional),
}).pipe(Schema.optional),
time: Schema.Struct({
created: Schema.DateTimeUtcFromMillis,
updated: Schema.DateTimeUtcFromMillis,
archived: Schema.DateTimeUtcFromMillis.pipe(Schema.optional),
}),
title: Schema.String,
/*
slug: Schema.String,
directory: Schema.String,
path: optionalOmitUndefined(Schema.String),
parentID: optionalOmitUndefined(SessionID),
summary: optionalOmitUndefined(Summary),
share: optionalOmitUndefined(Share),
title: Schema.String,
version: Schema.String,
time: Time,
permission: optionalOmitUndefined(Permission.Ruleset),
revert: optionalOmitUndefined(Revert),
*/
}) {}
export interface Interface {
fromID: (id: ID) => Effect.Effect<Info>
create: (input: CreateInput) => Effect.Effect<Info>
prompt: (input: PromptInput) => Effect.Effect<SessionEntry.User>
readonly list: (input: {
limit?: number
order?: "asc" | "desc"
directory?: string
path?: string
workspaceID?: WorkspaceID
roots?: boolean
start?: number
search?: string
cursor?: {
id: SessionID
time: number
direction: "previous" | "next"
}
}) => Effect.Effect<Info[], never>
readonly messages: (input: {
sessionID: SessionID
limit?: number
order?: "asc" | "desc"
cursor?: {
id: SessionMessage.ID
time: number
direction: "previous" | "next"
}
}) => Effect.Effect<SessionMessage.Message[], never>
readonly prompt: (input: {
id?: EventV2.ID
sessionID: SessionID
prompt: Prompt
delivery?: Delivery
}) => Effect.Effect<SessionMessage.User, never>
readonly switchAgent: (input: { sessionID: SessionID; agent: string }) => Effect.Effect<void, never>
readonly switchModel: (input: {
sessionID: SessionID
id: ModelID
providerID: ProviderID
variant?: string
}) => Effect.Effect<void, never>
readonly compact: (sessionID: SessionID) => Effect.Effect<void, never>
readonly wait: (sessionID: SessionID) => Effect.Effect<void, never>
}
export class Service extends Context.Service<Service, Interface>()("Session.Service") {}
export class Service extends Context.Service<Service, Interface>()("@opencode/v2/Session") {}
export const layer = Layer.effect(Service)(
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const session = yield* Session.Service
const decodeMessage = Schema.decodeUnknownSync(SessionMessage.Message)
const create: Interface["create"] = Effect.fn("Session.create")(function* (_input) {
throw new Error("Not implemented")
})
const decode = (row: typeof SessionMessageTable.$inferSelect) =>
decodeMessage({ ...row.data, id: row.id, type: row.type })
const prompt: Interface["prompt"] = Effect.fn("Session.prompt")(function* (_input) {
throw new Error("Not implemented")
})
function fromRow(row: typeof SessionTable.$inferSelect): Info {
return {
id: SessionID.make(row.id),
projectID: ProjectID.make(row.project_id),
workspaceID: row.workspace_id ? WorkspaceID.make(row.workspace_id) : undefined,
title: row.title,
parentID: row.parent_id ? SessionID.make(row.parent_id) : undefined,
path: row.path ?? "",
agent: row.agent ?? undefined,
model: row.model
? {
id: ModelID.make(row.model.id),
providerID: ProviderID.make(row.model.providerID),
variant: row.model.variant,
}
: undefined,
time: {
created: DateTime.makeUnsafe(row.time_created),
updated: DateTime.makeUnsafe(row.time_updated),
archived: row.time_archived ? DateTime.makeUnsafe(row.time_archived) : undefined,
},
}
}
const fromID: Interface["fromID"] = Effect.fn("Session.fromID")(function* (id) {
const match = yield* session.get(id)
return fromV1(match)
})
const result: Interface = {
list: Effect.fn("V2Session.list")(function* (input) {
const direction = input.cursor?.direction ?? "next"
let order = input.order ?? "desc"
// Query the adjacent rows in reverse, then flip them back into the requested order below.
if (direction === "previous" && order === "asc") order = "desc"
if (direction === "previous" && order === "desc") order = "asc"
const conditions: SQL[] = []
if (input.directory) conditions.push(eq(SessionTable.directory, input.directory))
if (input.path)
conditions.push(or(eq(SessionTable.path, input.path), like(SessionTable.path, `${input.path}/%`))!)
if (input.workspaceID) conditions.push(eq(SessionTable.workspace_id, input.workspaceID))
if (input.roots) conditions.push(isNull(SessionTable.parent_id))
if (input.start) conditions.push(gte(SessionTable.time_created, input.start))
if (input.search) conditions.push(like(SessionTable.title, `%${input.search}%`))
if (input.cursor) {
conditions.push(
order === "asc"
? or(
gt(SessionTable.time_created, input.cursor.time),
and(eq(SessionTable.time_created, input.cursor.time), gt(SessionTable.id, input.cursor.id)),
)!
: or(
lt(SessionTable.time_created, input.cursor.time),
and(eq(SessionTable.time_created, input.cursor.time), lt(SessionTable.id, input.cursor.id)),
)!,
)
}
const query = Database.Client()
.select()
.from(SessionTable)
.where(conditions.length > 0 ? and(...conditions) : undefined)
.orderBy(
order === "asc" ? asc(SessionTable.time_created) : desc(SessionTable.time_created),
order === "asc" ? asc(SessionTable.id) : desc(SessionTable.id),
)
return Service.of({
create,
prompt,
fromID,
})
const rows = input.limit === undefined ? query.all() : query.limit(input.limit).all()
return (direction === "previous" ? rows.toReversed() : rows).map((row) => fromRow(row))
}),
messages: Effect.fn("V2Session.messages")(function* (input) {
const direction = input.cursor?.direction ?? "next"
let order = input.order ?? "desc"
// Query the adjacent rows in reverse, then flip them back into the requested order below.
if (direction === "previous" && order === "asc") order = "desc"
if (direction === "previous" && order === "desc") order = "asc"
const boundary = input.cursor
? order === "asc"
? or(
gt(SessionMessageTable.time_created, input.cursor.time),
and(
eq(SessionMessageTable.time_created, input.cursor.time),
gt(SessionMessageTable.id, input.cursor.id),
),
)
: or(
lt(SessionMessageTable.time_created, input.cursor.time),
and(
eq(SessionMessageTable.time_created, input.cursor.time),
lt(SessionMessageTable.id, input.cursor.id),
),
)
: undefined
const where = boundary
? and(eq(SessionMessageTable.session_id, input.sessionID), boundary)
: eq(SessionMessageTable.session_id, input.sessionID)
const rows = Database.use((db) => {
const query = db
.select()
.from(SessionMessageTable)
.where(where)
.orderBy(
order === "asc" ? asc(SessionMessageTable.time_created) : desc(SessionMessageTable.time_created),
order === "asc" ? asc(SessionMessageTable.id) : desc(SessionMessageTable.id),
)
const rows = input.limit === undefined ? query.all() : query.limit(input.limit).all()
return direction === "previous" ? rows.toReversed() : rows
})
return rows.map((row) => decode(row))
}),
prompt: Effect.fn("V2Session.prompt")(function* (_input) {
return {} as any
}),
switchAgent: Effect.fn("V2Session.switchAgent")(function* (input) {
EventV2.run(SessionEvent.AgentSwitched.Sync, {
sessionID: input.sessionID,
timestamp: DateTime.makeUnsafe(Date.now()),
agent: input.agent,
})
}),
switchModel: Effect.fn("V2Session.switchModel")(function* (input) {
EventV2.run(SessionEvent.ModelSwitched.Sync, {
sessionID: input.sessionID,
timestamp: DateTime.makeUnsafe(Date.now()),
id: input.id,
providerID: input.providerID,
variant: input.variant,
})
}),
compact: Effect.fn("V2Session.compact")(function* (_sessionID) {}),
wait: Effect.fn("V2Session.wait")(function* (_sessionID) {}),
}
return Service.of(result)
}),
)
function fromV1(input: Session.Info): Info {
return new Info({
id: ID.make(input.id),
})
}
export const defaultLayer = layer
export * as SessionV2 from "./session"

View File

@@ -0,0 +1,18 @@
// Namespace self re-export so consumers can `import { ToolOutput } ...`.
// (It precedes the import below; both are hoisted under ESM, so the order is legal.)
export * as ToolOutput from "./tool-output"
import { Schema } from "effect"
/** Plain text emitted by a tool. */
export class TextContent extends Schema.Class<TextContent>("Tool.TextContent")({
  type: Schema.Literal("text"),
  text: Schema.String,
}) {}
/** File emitted by a tool, addressed by URI with a MIME type and optional name. */
export class FileContent extends Schema.Class<FileContent>("Tool.FileContent")({
  type: Schema.Literal("file"),
  uri: Schema.String,
  mime: Schema.String,
  name: Schema.String.pipe(Schema.optional),
}) {}
// Tagged union of tool content parts, discriminated on `type`.
export const Content = Schema.Union([TextContent, FileContent]).pipe(Schema.toTaggedUnion("type"))
// Free-form structured tool output: arbitrary string-keyed record.
export const Structured = Schema.Record(Schema.String, Schema.Any)

View File

@@ -58,6 +58,7 @@ function toolEvent(
raw: opts.raw,
}
const payload: EventMessagePartUpdated = {
id: `evt_${opts.callID}`,
type: "message.part.updated",
properties: {
sessionID: sessionId,

View File

@@ -48,7 +48,7 @@ test("build agent has correct default properties", async () => {
expect(build).toBeDefined()
expect(build?.mode).toBe("primary")
expect(build?.native).toBe(true)
expect(evalPerm(build, "edit")).toBe("allow")
expect(evalPerm(build, "edit")).toBe("ask")
expect(evalPerm(build, "bash")).toBe("allow")
},
})
@@ -228,8 +228,8 @@ test("agent permission config merges with defaults", async () => {
expect(build).toBeDefined()
// Specific pattern is denied
expect(Permission.evaluate("bash", "rm -rf *", build!.permission).action).toBe("deny")
// Edit still allowed
expect(evalPerm(build, "edit")).toBe("allow")
// Edit still asks (default behavior)
expect(evalPerm(build, "edit")).toBe("ask")
},
})
})

View File

@@ -1,51 +0,0 @@
import { afterEach, expect, test } from "bun:test"
import path from "path"
import { pathToFileURL } from "url"
import { AppRuntime } from "../../src/effect/app-runtime"
import { Agent } from "../../src/agent/agent"
import { Instance } from "../../src/project/instance"
import { disposeAllInstances, tmpdir } from "../fixture/fixture"
// Tear down every project Instance after each test so state does not leak.
afterEach(async () => {
  await disposeAllInstances()
})
// Verifies that an agent injected by a plugin's `config` hook is visible via Agent.list.
test("plugin-registered agents appear in Agent.list", async () => {
  await using tmp = await tmpdir({
    init: async (dir) => {
      // Write a plugin whose config hook adds an agent named "plugin_added".
      const pluginFile = path.join(dir, "plugin.ts")
      await Bun.write(
        pluginFile,
        [
          "export default async () => ({",
          " config: async (cfg) => {",
          " cfg.agent = cfg.agent ?? {}",
          " cfg.agent.plugin_added = {",
          ' description: "Added by a plugin via the config hook",',
          ' mode: "subagent",',
          " }",
          " },",
          "})",
          "",
        ].join("\n"),
      )
      // Point the project config at the plugin file via a file:// URL.
      await Bun.write(
        path.join(dir, "opencode.json"),
        JSON.stringify({
          $schema: "https://opencode.ai/config.json",
          plugin: [pathToFileURL(pluginFile).href],
        }),
      )
    },
  })
  await Instance.provide({
    directory: tmp.path,
    fn: async () => {
      // List agents through the runtime and assert the plugin-added one is present.
      const agents = await AppRuntime.runPromise(Agent.Service.use((svc) => svc.list()))
      const added = agents.find((agent) => agent.name === "plugin_added")
      expect(added?.description).toBe("Added by a plugin via the config hook")
      expect(added?.mode).toBe("subagent")
    },
  })
})

View File

@@ -25,6 +25,7 @@ function event(payload: Event, input: { directory: string; workspace?: string })
function vcs(branch: string): Event {
return {
id: `evt_vcs_${branch}`,
type: "vcs.branch.updated",
properties: {
branch,
@@ -34,6 +35,7 @@ function vcs(branch: string): Event {
function update(version: string): Event {
return {
id: `evt_update_${version}`,
type: "installation.update-available",
properties: {
version,

View File

@@ -12,9 +12,8 @@ import { Account } from "../../src/account/account"
import { AccessToken, AccountID, OrgID } from "../../src/account/schema"
import { AppFileSystem } from "@opencode-ai/core/filesystem"
import { Env } from "../../src/env"
import { provideTestInstance, provideTmpdirInstance } from "../fixture/fixture"
import { disposeAllInstances, provideTmpdirInstance } from "../fixture/fixture"
import { tmpdir } from "../fixture/fixture"
import { InstanceRuntime } from "@/project/instance-runtime"
import { CrossSpawnSpawner } from "@opencode-ai/core/cross-spawn-spawner"
import { testEffect } from "../lib/effect"
@@ -42,12 +41,6 @@ const emptyAuth = Layer.mock(Auth.Service)({
const testFlock = EffectFlock.defaultLayer
const noopNpm = Layer.mock(Npm.Service)({
install: () => Effect.void,
add: () => Effect.die("not implemented"),
which: () => Effect.succeed(Option.none()),
})
const layer = Config.layer.pipe(
Layer.provide(testFlock),
Layer.provide(AppFileSystem.defaultLayer),
@@ -55,7 +48,7 @@ const layer = Config.layer.pipe(
Layer.provide(emptyAuth),
Layer.provide(emptyAccount),
Layer.provideMerge(infra),
Layer.provide(noopNpm),
Layer.provide(Npm.defaultLayer),
)
const it = testEffect(layer)
@@ -64,17 +57,9 @@ const load = () => Effect.runPromise(Config.Service.use((svc) => svc.get()).pipe
const save = (config: Config.Info) =>
Effect.runPromise(Config.Service.use((svc) => svc.update(config)).pipe(Effect.scoped, Effect.provide(layer)))
const saveGlobal = (config: Config.Info) =>
Effect.runPromise(
Config.Service.use((svc) => svc.updateGlobal(config)).pipe(
Effect.map((result) => result.info),
Effect.scoped,
Effect.provide(layer),
),
)
const clear = async (wait = false) => {
await Effect.runPromise(Config.Service.use((svc) => svc.invalidate()).pipe(Effect.scoped, Effect.provide(layer)))
if (wait) await InstanceRuntime.disposeAllInstances()
}
Effect.runPromise(Config.Service.use((svc) => svc.updateGlobal(config)).pipe(Effect.scoped, Effect.provide(layer)))
const clear = (wait = false) =>
Effect.runPromise(Config.Service.use((svc) => svc.invalidate(wait)).pipe(Effect.scoped, Effect.provide(layer)))
const listDirs = () =>
Effect.runPromise(Config.Service.use((svc) => svc.directories()).pipe(Effect.scoped, Effect.provide(layer)))
const ready = () =>
@@ -123,7 +108,7 @@ async function check(map: (dir: string) => string) {
},
})
} finally {
await InstanceRuntime.disposeAllInstances()
await disposeAllInstances()
;(Global.Path as { config: string }).config = prev
await clear()
}
@@ -498,7 +483,6 @@ test("resolves env templates in account config with account token", async () =>
Layer.provide(emptyAuth),
Layer.provide(fakeAccount),
Layer.provideMerge(infra),
Layer.provide(noopNpm),
)
try {
@@ -509,7 +493,7 @@ test("resolves env templates in account config with account token", async () =>
expect(config.provider?.["opencode"]?.options?.apiKey).toBe("st_test_token")
}),
),
).pipe(Effect.scoped, Effect.provide(layer), Effect.runPromise)
).pipe(Effect.scoped, Effect.provide(layer), Effect.provide(Npm.defaultLayer), Effect.runPromise)
} finally {
if (originalControlToken !== undefined) {
process.env["OPENCODE_CONSOLE_TOKEN"] = originalControlToken
@@ -566,7 +550,7 @@ test("validates config schema and throws on invalid fields", async () => {
})
},
})
await provideTestInstance({
await Instance.provide({
directory: tmp.path,
fn: async () => {
// Strict schema should throw an error for invalid fields
@@ -581,7 +565,7 @@ test("throws error for invalid JSON", async () => {
await Filesystem.write(path.join(dir, "opencode.json"), "{ invalid json }")
},
})
await provideTestInstance({
await Instance.provide({
directory: tmp.path,
fn: async () => {
await expect(load()).rejects.toThrow()
@@ -1002,6 +986,11 @@ test("installs dependencies in writable OPENCODE_CONFIG_DIR", async () => {
const prev = process.env.OPENCODE_CONFIG_DIR
process.env.OPENCODE_CONFIG_DIR = tmp.extra
const noopNpm = Layer.mock(Npm.Service)({
install: () => Effect.void,
add: () => Effect.die("not implemented"),
which: () => Effect.succeed(Option.none()),
})
const testLayer = Config.layer.pipe(
Layer.provide(testFlock),
Layer.provide(AppFileSystem.defaultLayer),
@@ -1072,7 +1061,7 @@ test("resolves scoped npm plugins in config", async () => {
},
})
await provideTestInstance({
await Instance.provide({
directory: tmp.path,
fn: async () => {
const config = await load()
@@ -1110,7 +1099,7 @@ test("merges plugin arrays from global and local configs", async () => {
},
})
await provideTestInstance({
await Instance.provide({
directory: path.join(tmp.path, "project"),
fn: async () => {
const config = await load()
@@ -1269,7 +1258,7 @@ test("deduplicates duplicate plugins from global and local configs", async () =>
},
})
await provideTestInstance({
await Instance.provide({
directory: path.join(tmp.path, "project"),
fn: async () => {
const config = await load()
@@ -1318,7 +1307,7 @@ test("keeps plugin origins aligned with merged plugin list", async () => {
},
})
await provideTestInstance({
await Instance.provide({
directory: path.join(tmp.path, "project"),
fn: async () => {
const cfg = await load()
@@ -1894,7 +1883,7 @@ test("project config overrides remote well-known config", async () => {
Layer.provide(fakeAuth),
Layer.provide(emptyAccount),
Layer.provideMerge(infra),
Layer.provide(noopNpm),
Layer.provide(Npm.defaultLayer),
)
try {
@@ -1952,7 +1941,7 @@ test("wellknown URL with trailing slash is normalized", async () => {
Layer.provide(fakeAuth),
Layer.provide(emptyAccount),
Layer.provideMerge(infra),
Layer.provide(noopNpm),
Layer.provide(Npm.defaultLayer),
)
try {
@@ -2107,7 +2096,7 @@ describe("deduplicatePluginOrigins", () => {
},
})
await provideTestInstance({
await Instance.provide({
directory: path.join(tmp.path, "project"),
fn: async () => {
const config = await load()

View File

@@ -1,9 +1,8 @@
import { afterEach, beforeEach, expect, test } from "bun:test"
import path from "path"
import fs from "fs/promises"
import { provideTestInstance, tmpdir } from "../fixture/fixture"
import { tmpdir } from "../fixture/fixture"
import { Instance } from "../../src/project/instance"
import { InstanceRuntime } from "@/project/instance-runtime"
import { TuiConfig } from "../../src/cli/cmd/tui/config/tui"
import { Config } from "@/config/config"
import { Global } from "@opencode-ai/core/global"
@@ -14,10 +13,7 @@ import { CurrentWorkingDirectory } from "@/cli/cmd/tui/config/cwd"
import { ConfigPlugin } from "@/config/plugin"
const wintest = process.platform === "win32" ? test : test.skip
const clear = async (wait = false) => {
await AppRuntime.runPromise(Config.Service.use((svc) => svc.invalidate()))
if (wait) await InstanceRuntime.disposeAllInstances()
}
const clear = (wait = false) => AppRuntime.runPromise(Config.Service.use((svc) => svc.invalidate(wait)))
const load = () => AppRuntime.runPromise(Config.Service.use((svc) => svc.get()))
beforeEach(async () => {
@@ -91,7 +87,7 @@ test("keeps server and tui plugin merge semantics aligned", async () => {
},
})
await provideTestInstance({
await Instance.provide({
directory: tmp.path,
fn: async () => {
const server = await load()

View File

@@ -3,8 +3,9 @@ import { CrossSpawnSpawner } from "@opencode-ai/core/cross-spawn-spawner"
import { $ } from "bun"
import { Context, Deferred, Duration, Effect, Exit, Fiber, Layer } from "effect"
import { InstanceState } from "@/effect/instance-state"
import { InstanceStore } from "../../src/project/instance-store"
import { Instance } from "../../src/project/instance"
import { disposeAllInstances, provideInstance, reloadTestInstance, tmpdirScoped } from "../fixture/fixture"
import { disposeAllInstances, provideInstance, tmpdirScoped } from "../fixture/fixture"
import { testEffect } from "../lib/effect"
const it = testEffect(CrossSpawnSpawner.defaultLayer)
@@ -69,7 +70,7 @@ it.live("InstanceState invalidates on reload", () =>
)
const a = yield* access(state, dir)
yield* Effect.promise(() => reloadTestInstance({ directory: dir }))
yield* Effect.promise(() => InstanceStore.reloadInstance({ directory: dir }))
const b = yield* access(state, dir)
expect(a).not.toBe(b)
@@ -269,7 +270,7 @@ it.live("InstanceState correct after interleaved init and dispose", () =>
const [, b] = yield* Effect.all(
[
Effect.promise(() => reloadTestInstance({ directory: one })),
Effect.promise(() => InstanceStore.reloadInstance({ directory: one })),
Test.use((svc) => svc.get()).pipe(provideInstance(two)),
],
{ concurrency: "unbounded" },

View File

@@ -1,23 +0,0 @@
import { Config } from "@/config/config"
import { emptyConsoleState } from "@/config/console-state"
import { Effect, Layer } from "effect"
export function make(overrides: Partial<Config.Interface> = {}) {
return Config.Service.of({
get: () => Effect.succeed({}),
getGlobal: () => Effect.succeed({}),
getConsoleState: () => Effect.succeed(emptyConsoleState),
update: () => Effect.void,
updateGlobal: (config) => Effect.succeed({ info: config, changed: false }),
invalidate: () => Effect.void,
directories: () => Effect.succeed([]),
waitForDependencies: () => Effect.void,
...overrides,
})
}
export function layer(overrides?: Partial<Config.Interface>) {
return Layer.succeed(Config.Service, make(overrides))
}
export * as TestConfig from "./config"

View File

@@ -1,44 +1,20 @@
import { $ } from "bun"
import * as Observability from "@opencode-ai/core/effect/observability"
import * as fs from "fs/promises"
import os from "os"
import path from "path"
import { Effect, Context, Layer, ManagedRuntime } from "effect"
import { Effect, Context } from "effect"
import type * as PlatformError from "effect/PlatformError"
import type * as Scope from "effect/Scope"
import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process"
import type { Config } from "@/config/config"
import { InstanceRef } from "../../src/effect/instance-ref"
import { InstanceBootstrap } from "../../src/project/bootstrap-service"
import { InstanceRuntime } from "../../src/project/instance-runtime"
import { InstanceStore } from "../../src/project/instance-store"
import { Instance } from "../../src/project/instance"
import { TestLLMServer } from "../lib/llm-server"
const noopBootstrap = Layer.succeed(InstanceBootstrap.Service, InstanceBootstrap.Service.of({ run: Effect.void }))
const testInstanceRuntime = ManagedRuntime.make(
InstanceStore.defaultLayer.pipe(Layer.provide(noopBootstrap), Layer.provideMerge(Observability.layer)),
)
const runTestInstanceStore = <A>(fn: (store: InstanceStore.Interface) => Effect.Effect<A>) =>
testInstanceRuntime.runPromise(InstanceStore.Service.use(fn))
export async function provideTestInstance<R>(input: { directory: string; init?: Effect.Effect<void>; fn: () => R }) {
const ctx = await runTestInstanceStore((store) => store.load({ directory: input.directory, init: input.init }))
try {
return await Instance.restore(ctx, () => input.fn())
} finally {
await runTestInstanceStore((store) => store.dispose(ctx))
}
}
export async function reloadTestInstance(input: { directory: string }) {
return runTestInstanceStore((store) => store.reload(input))
}
export async function disposeAllInstances() {
await Promise.all([InstanceRuntime.disposeAllInstances(), runTestInstanceStore((store) => store.disposeAll())])
}
// Re-export for test ergonomics. The implementation lives next to the runtime
// it consumes; see `InstanceStore.disposeAllInstances` for the rationale.
export { disposeAllInstances } from "../../src/project/instance-store"
// Strip null bytes from paths (defensive fix for CI environment issues)
function sanitizePath(p: string): string {
@@ -153,10 +129,12 @@ export const provideInstance =
(directory: string) =>
<A, E, R>(self: Effect.Effect<A, E, R>): Effect.Effect<A, E, R> =>
Effect.contextWith((services: Context.Context<R>) =>
Effect.promise<A>(async () => {
const ctx = await runTestInstanceStore((store) => store.load({ directory }))
return Instance.restore(ctx, () => Effect.runPromiseWith(services)(self.pipe(Effect.provideService(InstanceRef, ctx))))
}),
Effect.promise<A>(async () =>
Instance.provide({
directory,
fn: () => Effect.runPromiseWith(services)(self.pipe(Effect.provideService(InstanceRef, Instance.current))),
}),
),
)
export function provideTmpdirInstance<A, E, R>(
@@ -170,7 +148,10 @@ export function provideTmpdirInstance<A, E, R>(
yield* Effect.addFinalizer(() =>
provided
? Effect.promise(() =>
runTestInstanceStore((store) => store.load({ directory: path }).pipe(Effect.flatMap((ctx) => store.dispose(ctx)))),
Instance.provide({
directory: path,
fn: () => InstanceStore.disposeInstance(Instance.current),
}),
).pipe(Effect.ignore)
: Effect.void,
)

Some files were not shown because too many files have changed in this diff Show More