mirror of https://fastgit.cc/https://github.com/anomalyco/opencode
synced 2026-05-02 23:04:07 +08:00

Compare commits: layered-de... → kit/e2e-se... (23 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 457a4b26fe | |
| | 94f71f59a3 | |
| | 3eb6508a64 | |
| | 6fdb8ab90d | |
| | 321bf1f8e1 | |
| | 62bd023086 | |
| | cb1a50055c | |
| | 65e3348232 | |
| | a6b9f0dac1 | |
| | 34f5bdbc99 | |
| | 0b4fe14b0a | |
| | 7230cd2683 | |
| | a915fe74be | |
| | 26d35583c5 | |
| | ae17b416b8 | |
| | 8ffadde85c | |
| | 3c0ad70653 | |
| | 264418c0cd | |
| | fa2c69f09c | |
| | 113304a058 | |
| | 8c4d49c2bc | |
| | 2aa6110c6e | |
| | 8b9b9ad31e | |
.github/workflows/publish.yml (vendored): 8 lines changed
```diff
@@ -114,7 +114,7 @@ jobs:
       - build-cli
       - version
     runs-on: blacksmith-4vcpu-windows-2025
-    if: github.repository == 'anomalyco/opencode' && github.ref_name != 'beta'
+    if: github.repository == 'anomalyco/opencode'
     env:
       AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
       AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
@@ -213,7 +213,6 @@ jobs:
     needs:
       - build-cli
       - version
-    if: github.ref_name != 'beta'
    continue-on-error: false
    env:
      AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
@@ -390,7 +389,7 @@ jobs:
     needs:
       - build-cli
       - version
-    if: github.repository == 'anomalyco/opencode' && github.ref_name != 'beta'
+    if: github.repository == 'anomalyco/opencode'
     continue-on-error: false
     env:
       AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
@@ -591,13 +590,12 @@ jobs:
           path: packages/opencode/dist

       - uses: actions/download-artifact@v4
-        if: github.ref_name != 'beta'
         with:
           name: opencode-cli-signed-windows
           path: packages/opencode/dist

       - uses: actions/download-artifact@v4
-        if: needs.version.outputs.release && github.ref_name != 'beta'
+        if: needs.version.outputs.release
         with:
           pattern: latest-yml-*
           path: /tmp/latest-yml
```
bun.lock: 5 lines changed
```diff
@@ -319,6 +319,7 @@
       "@actions/core": "1.11.1",
       "@actions/github": "6.0.1",
       "@agentclientprotocol/sdk": "0.16.1",
+      "@ai-sdk/alibaba": "1.0.17",
       "@ai-sdk/amazon-bedrock": "4.0.93",
       "@ai-sdk/anthropic": "3.0.67",
       "@ai-sdk/azure": "3.0.49",
@@ -707,6 +708,8 @@
     "@agentclientprotocol/sdk": ["@agentclientprotocol/sdk@0.16.1", "", { "peerDependencies": { "zod": "^3.25.0 || ^4.0.0" } }, "sha512-1ad+Sc/0sCtZGHthxxvgEUo5Wsbw16I+aF+YwdiLnPwkZG8KAGUEAPK6LM6Pf69lCyJPt1Aomk1d+8oE3C4ZEw=="],

+    "@ai-sdk/alibaba": ["@ai-sdk/alibaba@1.0.17", "", { "dependencies": { "@ai-sdk/openai-compatible": "2.0.41", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-ZbE+U5bWz2JBc5DERLowx5+TKbjGBE93LqKZAWvuEn7HOSQMraxFMZuc0ST335QZJAyfBOzh7m1mPQ+y7EaaoA=="],
+
     "@ai-sdk/amazon-bedrock": ["@ai-sdk/amazon-bedrock@4.0.93", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.69", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hcXDU8QDwpAzLVTuY932TQVlIij9+iaVTxc5mPGY6yb//JMAAC5hMVhg93IrxlrxWLvMgjezNgoZGwquR+SGnw=="],

     "@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.64", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.21" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-rwLi/Rsuj2pYniQXIrvClHvXDzgM4UQHHnvHTWEF14efnlKclG/1ghpNC+adsRujAbCTr6gRsSbDE2vEqriV7g=="],
@@ -5003,6 +5006,8 @@
     "@actions/http-client/undici": ["undici@6.24.1", "", {}, "sha512-sC+b0tB1whOCzbtlx20fx3WgCXwkW627p4EA9uM+/tNNPkSS+eSEld6pAs9nDv7WbY1UUljBMYPtu9BCOrCWKA=="],

+    "@ai-sdk/alibaba/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="],
+
     "@ai-sdk/amazon-bedrock/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.69", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-LshR7X3pFugY0o41G2VKTmg1XoGpSl7uoYWfzk6zjVZLhCfeFiwgpOga+eTV4XY1VVpZwKVqRnkDbIL7K2eH5g=="],

     "@ai-sdk/amazon-bedrock/@smithy/eventstream-codec": ["@smithy/eventstream-codec@4.2.12", "", { "dependencies": { "@aws-crypto/crc32": "5.2.0", "@smithy/types": "^4.13.1", "@smithy/util-hex-encoding": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-FE3bZdEl62ojmy8x4FHqxq2+BuOHlcxiH5vaZ6aqHJr3AIZzwF5jfx8dEiU/X0a8RboyNDjmXjlbr8AdEyLgiA=="],
```
```diff
@@ -1,8 +1,8 @@
 {
   "nodeModules": {
-    "x86_64-linux": "sha256-fNRQYkucjXr1D61HJRScJpDa6+oBdyhgTBxCu+PE2kQ=",
-    "aarch64-linux": "sha256-V8J6kn2nSdXrplyqi6aIqNlHcVjSxvye+yC/YFO7PF4=",
-    "aarch64-darwin": "sha256-6cLmUJVUycGALCmslXuloVGBSlFOSHRjsWjx7KOW8rg=",
-    "x86_64-darwin": "sha256-kcOSO3NFIJh79ylLotG41ovWLQfH5kh1WYFghUu+4HE="
+    "x86_64-linux": "sha256-g29OM3dy+sZ3ioTs8zjQOK1N+KnNr9ptP9xtdPcdr64=",
+    "aarch64-linux": "sha256-Iu91KwDcV5omkf4Ngny1aYpyCkPLjuoWOVUDOJUhW1k=",
+    "aarch64-darwin": "sha256-bk3G6m+Yo60Ea3Kyglc37QZf5Vm7MLMFcxemjc7HnL0=",
+    "x86_64-darwin": "sha256-y3hooQw13Z3Cu0KFfXYdpkTEeKTyuKd+a/jsXHQLdqA="
   }
 }
```
```diff
@@ -274,7 +274,7 @@ const WorkspaceSessionList = (props: {
   <div class="relative w-full py-1">
     <Button
       variant="ghost"
-      class="flex w-full text-left justify-start text-14-regular text-text-weak pl-9 pr-10"
+      class="flex w-full text-left justify-start text-14-regular text-text-weak pl-2 pr-10"
       size="large"
       onClick={(e: MouseEvent) => {
         props.loadMore()
```
```diff
@@ -642,10 +642,10 @@ export function MessageTimeline(props: {
   onClick={props.onResumeScroll}
 >
   <div
-    class="flex items-center justify-center w-8 h-6 rounded-[6px] border border-[var(--gray-dark-7)] bg-[color-mix(in_srgb,var(--gray-dark-3)_80%,transparent)] backdrop-blur-[0.75px] transition-colors group-hover:border-[var(--gray-dark-8)] [--icon-base:var(--gray-dark-10)] group-hover:[--icon-base:var(--gray-dark-11)]"
+    class="flex items-center justify-center w-8 h-6 rounded-[6px] border border-border-weaker-base bg-[color-mix(in_srgb,var(--surface-raised-stronger-non-alpha)_80%,transparent)] backdrop-blur-[0.75px] transition-colors group-hover:border-[var(--border-weak-base)] group-hover:[--icon-base:var(--icon-hover)]"
     style={{
       "box-shadow":
-        "0 51px 60px 0 rgba(0,0,0,0.13), 0 15.375px 18.088px 0 rgba(0,0,0,0.19), 0 6.386px 7.513px 0 rgba(0,0,0,0.25), 0 2.31px 2.717px 0 rgba(0,0,0,0.38)",
+        "0 51px 60px 0 rgba(0,0,0,0.10), 0 15px 18px 0 rgba(0,0,0,0.12), 0 6.386px 7.513px 0 rgba(0,0,0,0.12), 0 2.31px 2.717px 0 rgba(0,0,0,0.20)",
     }}
   >
     <Icon name="arrow-down-to-line" size="small" />
```
```diff
@@ -66,7 +66,7 @@ export function createMainWindow(globals: Globals) {
     y: state.y,
     width: state.width,
     height: state.height,
-    show: true,
+    show: false,
     title: "OpenCode",
     icon: iconPath(),
     backgroundColor,
@@ -94,6 +94,10 @@ export function createMainWindow(globals: Globals) {
   wireZoom(win)
   injectGlobals(win, globals)

+  win.once("ready-to-show", () => {
+    win.show()
+  })
+
   return win
 }
```
```diff
@@ -13,7 +13,7 @@

 Use these rules when writing or migrating Effect code.

-See `specs/effect-migration.md` for the compact pattern reference and examples.
+See `specs/effect/migration.md` for the compact pattern reference and examples.

 ## Core

@@ -51,7 +51,7 @@ See `specs/effect/migration.md` for the compact pattern reference and examples.

 ## Effect.cached for deduplication

-Use `Effect.cached` when multiple concurrent callers should share a single in-flight computation rather than storing `Fiber | undefined` or `Promise | undefined` manually. See `specs/effect-migration.md` for the full pattern.
+Use `Effect.cached` when multiple concurrent callers should share a single in-flight computation rather than storing `Fiber | undefined` or `Promise | undefined` manually. See `specs/effect/migration.md` for the full pattern.

 ## Instance.bind — ALS for native callbacks
```
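For context on the `Effect.cached` rule touched by this diff, here is a minimal sketch of the deduplication pattern it describes; `expensiveLookup` and the URL are hypothetical stand-ins, not code from this repository:

```ts
import { Effect } from "effect"

// Hypothetical slow computation that concurrent callers should share.
const expensiveLookup = Effect.promise(() => fetch("https://example.com/models").then((r) => r.json()))

const program = Effect.gen(function* () {
  // Effect.cached wraps the effect so the first caller starts the computation
  // and every concurrent caller awaits the same in-flight result, replacing
  // manual `Fiber | undefined` / `Promise | undefined` bookkeeping.
  const cached = yield* Effect.cached(expensiveLookup)
  const [a, b] = yield* Effect.all([cached, cached], { concurrency: "unbounded" })
  return [a, b]
})
```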
```diff
@@ -14,18 +14,11 @@
     "fix-node-pty": "bun run script/fix-node-pty.ts",
     "upgrade-opentui": "bun run script/upgrade-opentui.ts",
     "dev": "bun run --conditions=browser ./src/index.ts",
-    "random": "echo 'Random script updated at $(date)' && echo 'Change queued successfully' && echo 'Another change made' && echo 'Yet another change' && echo 'One more change' && echo 'Final change' && echo 'Another final change' && echo 'Yet another final change'",
-    "clean": "echo 'Cleaning up...' && rm -rf node_modules dist",
-    "lint": "echo 'Running lint checks...' && bun test --coverage",
-    "format": "echo 'Formatting code...' && bun run --prettier --write src/**/*.ts",
-    "docs": "echo 'Generating documentation...' && find src -name '*.ts' -exec echo 'Processing: {}' \\;",
-    "deploy": "echo 'Deploying application...' && bun run build && echo 'Deployment completed successfully'",
     "db": "bun drizzle-kit"
   },
   "bin": {
     "opencode": "./bin/opencode"
   },
-  "randomField": "this-is-a-random-value-12345",
   "exports": {
     "./*": "./src/*.ts"
   },
@@ -83,6 +76,7 @@
     "@actions/core": "1.11.1",
     "@actions/github": "6.0.1",
     "@agentclientprotocol/sdk": "0.16.1",
+    "@ai-sdk/alibaba": "1.0.17",
     "@ai-sdk/amazon-bedrock": "4.0.93",
     "@ai-sdk/anthropic": "3.0.67",
     "@ai-sdk/azure": "3.0.49",
```
```diff
@@ -1,3 +1,5 @@
+import { AppRuntime } from "@/effect/app-runtime"
+
 const dir = process.env.OPENCODE_E2E_PROJECT_DIR ?? process.cwd()
 const title = process.env.OPENCODE_E2E_SESSION_TITLE ?? "E2E Session"
 const text = process.env.OPENCODE_E2E_MESSAGE ?? "Seeded for UI e2e"
@@ -20,10 +22,10 @@ const seed = async () => {
   try {
     await Instance.provide({
       directory: dir,
-      init: InstanceBootstrap,
+      init: () => AppRuntime.runPromise(InstanceBootstrap),
       fn: async () => {
         await Config.waitForDependencies()
-        await ToolRegistry.ids()
+        await AppRuntime.runPromise(ToolRegistry.Service.use((svc) => svc.ids()))

         const session = await Session.create({ title })
         const messageID = MessageID.ascending()
@@ -54,6 +56,7 @@ const seed = async () => {
     })
   } finally {
     await Instance.disposeAll().catch(() => {})
+    await AppRuntime.dispose().catch(() => {})
   }
 }
```
packages/opencode/specs/effect/http-api.md: new file (137 lines)
# HttpApi migration

Practical notes for an eventual migration of `packages/opencode` server routes from the current Hono handlers to Effect `HttpApi`, either as a full replacement or as a parallel surface.

## Goal

Use Effect `HttpApi` where it gives us a better typed contract for:

- route definition
- request decoding and validation
- typed success and error responses
- OpenAPI generation
- handler composition inside Effect

This should be treated as a later-stage HTTP boundary migration, not a prerequisite for ongoing service, route-handler, or schema work.

## Core model

`HttpApi` is definition-first.

- `HttpApi` is the root API
- `HttpApiGroup` groups related endpoints
- `HttpApiEndpoint` defines a single route and its request / response schemas
- handlers are implemented separately from the contract

This is a better fit once route inputs and outputs are already moving toward Effect Schema-first models.
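A minimal sketch of that definition-first shape, assuming `@effect/platform`'s `HttpApi` modules; the `Question` model, group name, and paths are hypothetical, not taken from the opencode codebase:

```ts
import { HttpApi, HttpApiEndpoint, HttpApiGroup } from "@effect/platform"
import { Schema } from "effect"

// Hypothetical domain model for illustration.
class Question extends Schema.Class<Question>("Question")({
  id: Schema.String,
  text: Schema.String,
}) {}

// The contract: paths, request shapes, and response schemas in one place.
const questions = HttpApiGroup.make("questions")
  .add(HttpApiEndpoint.get("list", "/question").addSuccess(Schema.Array(Question)))
  .add(
    HttpApiEndpoint.post("reply", "/question/:id/reply")
      .setPath(Schema.Struct({ id: Schema.String }))
      .setPayload(Schema.Struct({ text: Schema.String }))
      .addSuccess(Schema.Boolean),
  )

// Handlers are implemented separately against this definition.
export const api = HttpApi.make("opencode").add(questions)
```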
## Why it is relevant here

The current route-effectification work is already pushing handlers toward:

- one `AppRuntime.runPromise(Effect.gen(...))` body
- yielding services from context
- using typed Effect errors instead of Promise wrappers

That work is a good prerequisite for `HttpApi`. Once the handler body is already a composed Effect, the remaining migration is mostly about replacing the Hono route declaration and validator layer.

## What HttpApi gives us

### Contracts

Request params, query, payload, success payloads, and typed error payloads are declared in one place using Effect Schema.

### Validation and decoding

Incoming data is decoded through Effect Schema instead of hand-maintained Zod validators per route.

### OpenAPI

`HttpApi` can derive OpenAPI from the API definition, which overlaps with the current `describeRoute(...)` and `resolver(...)` pattern.

### Typed errors

`Schema.TaggedErrorClass` maps naturally to endpoint error contracts.
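For example, a tagged domain error can be declared on the endpoint together with its status code. This is a hedged sketch reusing the hypothetical `Question` model above and the `Schema.TaggedErrorClass` shape used throughout these specs:

```ts
// Hypothetical error type on the Schema.TaggedErrorClass shape.
class QuestionNotFound extends Schema.TaggedErrorClass<QuestionNotFound>()("QuestionNotFound", {
  id: Schema.String,
}) {}

const get = HttpApiEndpoint.get("get", "/question/:id")
  .setPath(Schema.Struct({ id: Schema.String }))
  .addSuccess(Question)
  // The error schema becomes part of the contract and of the generated OpenAPI.
  .addError(QuestionNotFound, { status: 404 })
```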
## Likely fit for opencode

Best fit first:

- JSON request / response endpoints
- route groups that already mostly delegate into services
- endpoints whose request and response models can be defined with Effect Schema

Harder / later fit:

- SSE endpoints
- websocket endpoints
- streaming handlers
- routes with heavy Hono-specific middleware assumptions

## Current blockers and gaps

### Schema split

Many route boundaries still use Zod-first validators. That does not block all experimentation, but full `HttpApi` adoption is easier after the domain and boundary types are more consistently Schema-first with `.zod` compatibility only where needed.

### Mixed handler styles

Many current `server/instance/*.ts` handlers still call async facades directly. Migrating those to composed `Effect.gen(...)` handlers is the low-risk step to do first.

### Non-JSON routes

The server currently includes SSE, websocket, and streaming-style endpoints. Those should not be the first `HttpApi` targets.

### Existing Hono integration

The current server composition, middleware, and docs flow are Hono-centered today. That suggests a parallel or incremental adoption plan is safer than a flag-day rewrite.

## Recommended strategy

### 1. Finish the prerequisites first

- continue route-handler effectification in `server/instance/*.ts`
- continue schema migration toward Effect Schema-first DTOs and errors
- keep removing service facades

### 2. Start with one parallel group

Introduce one small `HttpApi` group for plain JSON endpoints only. Good initial candidates are the least stateful endpoints in:

- `server/instance/question.ts`
- `server/instance/provider.ts`
- `server/instance/permission.ts`

Avoid `session.ts`, SSE, websocket, and TUI-facing routes first.

### 3. Reuse existing services

Do not re-architect business logic during the HTTP migration. `HttpApi` handlers should call the same Effect services already used by the Hono handlers.

### 4. Run in parallel before replacing

Prefer mounting an experimental `HttpApi` surface alongside the existing Hono routes first. That lowers migration risk and lets us compare:

- handler ergonomics
- OpenAPI output
- auth and middleware integration
- test ergonomics
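One plausible way to mount such a parallel surface, sketched under the assumption that the Hono app can delegate a path prefix to a fetch-style handler; `HttpApiBuilder.toWebHandler` and `HttpServer.layerContext` are the `@effect/platform` pieces, while `api`, `questionsLive`, and the route prefix are hypothetical:

```ts
import { HttpApiBuilder, HttpServer } from "@effect/platform"
import { Layer } from "effect"

// Implementation layer for the hypothetical contract, then a plain
// (Request) => Promise<Response> handler derived from it.
const ApiLive = HttpApiBuilder.api(api).pipe(Layer.provide(questionsLive))
const { handler } = HttpApiBuilder.toWebHandler(Layer.mergeAll(ApiLive, HttpServer.layerContext))

// Mount next to the existing Hono routes under an experimental prefix.
app.all("/experimental/http-api/*", (c) => handler(c.req.raw))
```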
### 5. Migrate JSON route groups gradually

If the parallel slice works well, migrate additional JSON route groups one at a time. Leave streaming-style endpoints on Hono until there is a clear reason to move them.

## Proposed first steps

- [ ] add one small spike that defines an `HttpApi` group for a simple JSON route set
- [ ] use Effect Schema request / response types for that slice
- [ ] keep the underlying service calls identical to the current handlers
- [ ] compare generated OpenAPI against the current Hono/OpenAPI setup
- [ ] document how auth, instance lookup, and error mapping would compose in the new stack
- [ ] decide after the spike whether `HttpApi` should stay parallel, replace only some groups, or become the long-term default

## Rule of thumb

Do not start with the hardest route file.

If `HttpApi` is adopted here, it should arrive after the handler body is already Effect-native and after the relevant request / response models have moved to Effect Schema.
````diff
@@ -178,7 +178,9 @@ That is fine for leaf files like `schema.ts`. Keep the service surface in the ow

 ## Migration checklist

-Fully migrated (single namespace, InstanceState where needed, flattened facade):
+Service-shape migrated (single namespace, traced methods, `InstanceState` where needed).
+
+This checklist is only about the service shape migration. Many of these services still keep `makeRuntime(...)` plus async facade exports; that facade-removal phase is tracked separately in [Destroying the facades](#destroying-the-facades).

 - [x] `Account` — `account/index.ts`
 - [x] `Agent` — `agent/agent.ts`
@@ -221,59 +223,16 @@ Fully migrated (single namespace, InstanceState where needed, flattened facade):
 - [x] `Provider` — `provider/provider.ts`
 - [x] `Storage` — `storage/storage.ts`
 - [x] `ShareNext` — `share/share-next.ts`
+- [x] `SessionTodo` — `session/todo.ts`

-Still open:
+Still open at the service-shape level:

-- [ ] `SyncEvent` — `sync/index.ts`
-- [ ] `Workspace` — `control-plane/workspace.ts`
+- [ ] `SyncEvent` — `sync/index.ts` (deferred pending sync with James)
+- [ ] `Workspace` — `control-plane/workspace.ts` (deferred pending sync with James)

-## Tool interface → Effect
+## Tool migration

-`Tool.Def.execute` and `Tool.Info.init` already return `Effect` on this branch. Tool definitions should now stay Effect-native all the way through initialization instead of using Promise-returning init callbacks. Tools can still use lazy init callbacks when they need instance-bound state at init time, but those callbacks should return `Effect`, not `Promise`. Remaining work is:
-
-1. Migrate each tool body to return Effects
-2. Keep `Tool.define()` inputs Effect-native
-3. Update remaining callers to `yield*` tool initialization instead of `await`ing
-
-### Tool migration details
-
-With `Tool.Info.init()` now effectful, use this transitional pattern for migrated tools that still need Promise-based boundaries internally:
-
-- `Tool.defineEffect(...)` should `yield*` the services the tool depends on and close over them in the returned tool definition.
-- Keep the bridge at the Promise boundary only inside the tool body when required by external APIs. Do not return Promise-based init callbacks from `Tool.define()`.
-- If a tool starts requiring new services, wire them into `ToolRegistry.defaultLayer` so production callers resolve the same dependencies as tests.
-
-Tool tests should use the existing Effect helpers in `packages/opencode/test/lib/effect.ts`:
-
-- Use `testEffect(...)` / `it.live(...)` instead of creating fake local wrappers around effectful tools.
-- Yield the real tool export, then initialize it: `const info = yield* ReadTool`, `const tool = yield* info.init()`.
-- Run tests inside a real instance with `provideTmpdirInstance(...)` or `provideInstance(tmpdirScoped(...))` so instance-scoped services resolve exactly as they do in production.
-
-This keeps migrated tool tests aligned with the production service graph today, and makes the eventual `Tool.Info` → `Effect` cleanup mostly mechanical later.
-
-Individual tools, ordered by value:
-
-- [ ] `apply_patch.ts` — HIGH: multi-step orchestration, error accumulation, Bus events
-- [ ] `bash.ts` — HIGH: shell orchestration, quoting, timeout handling, output capture
-- [x] `read.ts` — HIGH: streaming I/O, readline, binary detection → FileSystem + Stream
-- [ ] `edit.ts` — HIGH: multi-step diff/format/publish pipeline, FileWatcher lock
-- [ ] `grep.ts` — MEDIUM: spawns ripgrep → ChildProcessSpawner, timeout handling
-- [ ] `write.ts` — MEDIUM: permission checks, diagnostics polling, Bus events
-- [ ] `codesearch.ts` — MEDIUM: HTTP + SSE + manual timeout → HttpClient + Effect.timeout
-- [ ] `webfetch.ts` — MEDIUM: fetch with UA retry, size limits → HttpClient
-- [ ] `websearch.ts` — MEDIUM: MCP over HTTP → HttpClient
-- [ ] `batch.ts` — MEDIUM: parallel execution, per-call error recovery → Effect.all
-- [ ] `task.ts` — MEDIUM: task state management
-- [ ] `ls.ts` — MEDIUM: bounded directory listing over ripgrep-backed traversal
-- [ ] `multiedit.ts` — MEDIUM: sequential edit orchestration over `edit.ts`
-- [ ] `glob.ts` — LOW: simple async generator
-- [ ] `lsp.ts` — LOW: dispatch switch over LSP operations
-- [ ] `question.ts` — LOW: prompt wrapper
-- [ ] `skill.ts` — LOW: skill tool adapter
-- [ ] `todo.ts` — LOW: todo persistence wrapper
-- [ ] `invalid.ts` — LOW: invalid-tool fallback
-- [ ] `plan.ts` — LOW: plan file operations
+Tool-specific migration guidance and checklist live in `tools.md`.

 ## Effect service adoption in already-migrated code

@@ -281,27 +240,19 @@ Some already-effectified areas still use raw `Filesystem.*` or `Process.spawn` i

 ### `Filesystem.*` → `AppFileSystem.Service` (yield in layer)

 - [ ] `file/index.ts` — 1 remaining `Filesystem.readText()` call in untracked diff handling
-- [ ] `config/config.ts` — 5 remaining `Filesystem.*` calls in `installDependencies()`
-- [ ] `provider/provider.ts` — 1 remaining `Filesystem.readJson()` call for recent model state
+- [x] `config/config.ts` — `installDependencies()` now uses `AppFileSystem`
+- [x] `provider/provider.ts` — recent model state now reads via `AppFileSystem.Service`

 ### `Process.spawn` → `ChildProcessSpawner` (yield in layer)

-- [ ] `format/formatter.ts` — 2 remaining `Process.spawn()` checks (`air`, `uv`)
+- [x] `format/formatter.ts` — direct `Process.spawn()` checks removed (`air`, `uv`)
 - [ ] `lsp/server.ts` — multiple `Process.spawn()` installs/download helpers

 ## Filesystem consolidation

-`util/filesystem.ts` (raw fs wrapper) is currently imported by **34 files**. The effectified `AppFileSystem` service (`filesystem/index.ts`) is currently imported by **15 files**. As services and tools are effectified, they should switch from `Filesystem.*` to yielding `AppFileSystem.Service` — this happens naturally during each migration, not as a separate effort.
+`util/filesystem.ts` is still used widely across `src/`, and raw `fs` / `fs/promises` imports still exist in multiple tooling and infrastructure files. As services and tools are effectified, they should switch from `Filesystem.*` to yielding `AppFileSystem.Service` where possible — this should happen naturally during each migration, not as a separate sweep.

-Similarly, **21 files** still import raw `fs` or `fs/promises` directly. These should migrate to `AppFileSystem` or `Filesystem.*` as they're touched.
-
-Current raw fs users that will convert during tool migration:
-
-- `tool/read.ts` — fs.createReadStream, readline
-- `tool/apply_patch.ts` — fs/promises
-- `file/ripgrep.ts` — fs/promises
-- `patch/index.ts` — fs, fs/promises
+Tool-specific filesystem cleanup notes live in `tools.md`.

 ## Primitives & utilities

@@ -312,7 +263,9 @@ Current raw fs users that will convert during tool migration:

 ## Destroying the facades

-Every service currently exports async facade functions at the bottom of its namespace — `export async function read(...) { return runPromise(...) }` — backed by a per-service `makeRuntime`. These exist because cyclic imports used to force each service to build its own independent runtime. Now that the layer DAG is acyclic and `AppRuntime` (`src/effect/app-runtime.ts`) composes everything into one `ManagedRuntime`, we're removing them.
+This phase is still broadly open. As of 2026-04-11 there are still 31 `makeRuntime(...)` call sites under `src/`, and many service namespaces still export async facade helpers like `export async function read(...) { return runPromise(...) }`.
+
+These facades exist because cyclic imports used to force each service to build its own independent runtime. Now that the layer DAG is acyclic and `AppRuntime` (`src/effect/app-runtime.ts`) composes everything into one `ManagedRuntime`, we're removing them.

 ### Process

@@ -341,47 +294,14 @@ For each service, the migration is roughly:

 - `ShareNext` — migrated 2026-04-11. Swapped remaining async callers to `AppRuntime.runPromise(ShareNext.Service.use(...))`, removed the `makeRuntime(...)` facade, and kept instance bootstrap on the shared app runtime.
 - `SessionTodo` — migrated 2026-04-10. Already matched the target service shape in `session/todo.ts`: single namespace, traced Effect methods, and no `makeRuntime(...)` facade remained; checklist updated to reflect the completed migration.
 - `Storage` — migrated 2026-04-10. One production caller (`Session.diff`) and all storage.test.ts tests converted to effectful style. Facades and `makeRuntime` removed.
-- `SessionRunState` — migrated 2026-04-11. Single caller in `server/routes/session.ts` converted; facade removed.
-- `Account` — migrated 2026-04-11. Callers in `server/routes/experimental.ts` and `cli/cmd/account.ts` converted; facade removed.
+- `SessionRunState` — migrated 2026-04-11. Single caller in `server/instance/session.ts` converted; facade removed.
+- `Account` — migrated 2026-04-11. Callers in `server/instance/experimental.ts` and `cli/cmd/account.ts` converted; facade removed.
 - `Instruction` — migrated 2026-04-11. Test-only callers converted; facade removed.
 - `FileTime` — migrated 2026-04-11. Test-only callers converted; facade removed.
 - `FileWatcher` — migrated 2026-04-11. Callers in `project/bootstrap.ts` and test converted; facade removed.
-- `Question` — migrated 2026-04-11. Callers in `server/routes/question.ts` and test converted; facade removed.
+- `Question` — migrated 2026-04-11. Callers in `server/instance/question.ts` and test converted; facade removed.
 - `Truncate` — migrated 2026-04-11. Caller in `tool/tool.ts` and test converted; facade removed.

-## Route handler effectification
-
-Route handlers should wrap their entire body in a single `AppRuntime.runPromise(Effect.gen(...))` call, yielding services from context rather than calling facades one-by-one. This eliminates multiple `runPromise` round-trips and lets handlers compose naturally.
-
-```ts
-// Before — one facade call per service
-;async (c) => {
-  await SessionRunState.assertNotBusy(id)
-  await Session.removeMessage({ sessionID: id, messageID })
-  return c.json(true)
-}
-
-// After — one Effect.gen, yield services from context
-;async (c) => {
-  await AppRuntime.runPromise(
-    Effect.gen(function* () {
-      const state = yield* SessionRunState.Service
-      const session = yield* Session.Service
-      yield* state.assertNotBusy(id)
-      yield* session.removeMessage({ sessionID: id, messageID })
-    }),
-  )
-  return c.json(true)
-}
-```
-
-When migrating, always use `{ concurrency: "unbounded" }` with `Effect.all` — route handlers should run independent service calls in parallel, not sequentially.
-
-Route files to convert (each handler that calls facades should be wrapped):
-
-- [ ] `server/routes/session.ts` — heaviest; uses Session, SessionPrompt, SessionRevert, SessionCompaction, SessionShare, SessionSummary, SessionRunState, Agent, Permission, Bus
-- [ ] `server/routes/global.ts` — uses Config, Project, Provider, Vcs, Snapshot, Agent
-- [ ] `server/routes/provider.ts` — uses Provider, Auth, Config
-- [ ] `server/routes/question.ts` — uses Question
-- [ ] `server/routes/pty.ts` — uses Pty
-- [ ] `server/routes/experimental.ts` — uses Account, ToolRegistry, Agent, MCP, Config
+Route-handler migration guidance and checklist live in `routes.md`.
````
packages/opencode/specs/effect/routes.md: new file (66 lines)
# Route handler effectification

Practical reference for converting server route handlers in `packages/opencode` to a single `AppRuntime.runPromise(Effect.gen(...))` body.

## Goal

Route handlers should wrap their entire body in a single `AppRuntime.runPromise(Effect.gen(...))` call, yielding services from context rather than calling facades one-by-one.

This eliminates multiple `runPromise` round-trips and lets handlers compose naturally.

```ts
// Before - one facade call per service
;async (c) => {
  await SessionRunState.assertNotBusy(id)
  await Session.removeMessage({ sessionID: id, messageID })
  return c.json(true)
}

// After - one Effect.gen, yield services from context
;async (c) => {
  await AppRuntime.runPromise(
    Effect.gen(function* () {
      const state = yield* SessionRunState.Service
      const session = yield* Session.Service
      yield* state.assertNotBusy(id)
      yield* session.removeMessage({ sessionID: id, messageID })
    }),
  )
  return c.json(true)
}
```

## Rules

- Wrap the whole handler body in one `AppRuntime.runPromise(Effect.gen(...))` call when the handler is service-heavy.
- Yield services from context instead of calling async facades repeatedly.
- When independent service calls can run in parallel, use `Effect.all(..., { concurrency: "unbounded" })`, as sketched below.
- Prefer one composed Effect body over multiple separate `runPromise(...)` calls in the same handler.
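A minimal sketch of that parallel-call rule, with hypothetical `Config` and `Provider` service methods standing in for the real ones:

```ts
const body = Effect.gen(function* () {
  const config = yield* Config.Service
  const provider = yield* Provider.Service
  // Independent reads run in parallel instead of sequentially.
  const [info, models] = yield* Effect.all([config.get(), provider.list()], {
    concurrency: "unbounded",
  })
  return { info, models }
})
```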
## Current route files

Current instance route files live under `src/server/instance`, not `server/routes`.

The main migration targets are:

- [ ] `server/instance/session.ts` — heaviest; still has many direct facade calls for Session, SessionPrompt, SessionRevert, SessionCompaction, SessionShare, SessionSummary, Agent, Bus
- [ ] `server/instance/global.ts` — still has direct facade calls for Config and instance lifecycle actions
- [ ] `server/instance/provider.ts` — still has direct facade calls for Config and Provider
- [ ] `server/instance/question.ts` — partially converted; still worth tracking here until it consistently uses the composed style
- [ ] `server/instance/pty.ts` — still calls Pty facades directly
- [ ] `server/instance/experimental.ts` — mixed state; some handlers are already composed, others still use facades

Additional route files that still participate in the migration:

- [ ] `server/instance/index.ts` — Vcs, Agent, Skill, LSP, Format
- [ ] `server/instance/file.ts` — Ripgrep, File, LSP
- [ ] `server/instance/mcp.ts` — MCP facade-heavy
- [ ] `server/instance/permission.ts` — Permission
- [ ] `server/instance/workspace.ts` — Workspace
- [ ] `server/instance/tui.ts` — Bus and Session
- [ ] `server/instance/middleware.ts` — Session and Workspace lookups

## Notes

- Some handlers already use `AppRuntime.runPromise(Effect.gen(...))` in isolated places. Keep pushing those files toward one consistent style.
- Route conversion is closely tied to facade removal. As services lose `makeRuntime`-backed async exports, route handlers should switch to yielding the service directly.
packages/opencode/specs/effect/schema.md: new file (99 lines)
# Schema migration

Practical reference for migrating data types in `packages/opencode` from Zod-first definitions to Effect Schema with Zod compatibility shims.

## Goal

Use Effect Schema as the source of truth for domain models, IDs, inputs, outputs, and typed errors.

Keep Zod available at existing HTTP, tool, and compatibility boundaries by exposing a `.zod` field derived from the Effect schema.

## Preferred shapes

### Data objects

Use `Schema.Class` for structured data.

```ts
export class Info extends Schema.Class<Info>("Foo.Info")({
  id: FooID,
  name: Schema.String,
  enabled: Schema.Boolean,
}) {
  static readonly zod = zod(Info)
}
```

If the class cannot reference itself cleanly during initialization, use the existing two-step pattern:

```ts
const _Info = Schema.Struct({
  id: FooID,
  name: Schema.String,
})

export const Info = Object.assign(_Info, {
  zod: zod(_Info),
})
```

### Errors

Use `Schema.TaggedErrorClass` for domain errors.

```ts
export class NotFoundError extends Schema.TaggedErrorClass<NotFoundError>()("FooNotFoundError", {
  id: FooID,
}) {}
```

### IDs and branded leaf types

Keep branded/schema-backed IDs as Effect schemas and expose `static readonly zod` for compatibility when callers still expect Zod.
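A sketch of such a branded leaf type, reusing the hypothetical `FooID` from the examples above and the `zod()` helper from `@/util/effect-zod`; since a plain schema has no class body for a `static`, the compatibility surface hangs off the exported object, mirroring the two-step pattern:

```ts
const _FooID = Schema.String.pipe(Schema.brand("FooID"))

// Effect Schema owns the type; `.zod` is only the compatibility surface.
export const FooID = Object.assign(_FooID, {
  zod: zod(_FooID),
})
export type FooID = typeof _FooID.Type
```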
## Compatibility rule

During migration, route validators, tool parameters, and any existing Zod-based boundary should consume the derived `.zod` schema instead of maintaining a second hand-written Zod schema.

The default should be:

- Effect Schema owns the type
- `.zod` exists only as a compatibility surface
- new domain models should not start Zod-first unless there is a concrete boundary-specific need
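At a route boundary this looks roughly like the sketch below, assuming a Hono route with `@hono/zod-validator`; the path and handler body are hypothetical:

```ts
import { zValidator } from "@hono/zod-validator"

// The validator consumes the derived compatibility schema, so the Effect
// Schema definition of Info stays the single source of truth.
app.post("/foo", zValidator("json", Info.zod), async (c) => {
  const body = c.req.valid("json")
  return c.json(body.name)
})
```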
## When Zod can stay

It is fine to keep a Zod-native schema temporarily when:

- the type is only used at an HTTP or tool boundary
- the validator depends on Zod-only transforms or behavior not yet covered by `zod()`
- the migration would force unrelated churn across a large call graph

When this happens, prefer leaving a short note or TODO rather than silently creating a parallel schema source of truth.

## Ordering

Migrate in this order:

1. Shared leaf models and `schema.ts` files
2. Exported `Info`, `Input`, `Output`, and DTO types
3. Tagged domain errors
4. Service-local internal models
5. Route and tool boundary validators that can switch to `.zod`

This keeps shared types canonical first and makes boundary updates mostly mechanical.

## Checklist

- [ ] Shared `schema.ts` leaf models are Effect Schema-first
- [ ] Exported `Info` / `Input` / `Output` types use `Schema.Class` where appropriate
- [ ] Domain errors use `Schema.TaggedErrorClass`
- [ ] Migrated types expose `.zod` for back compatibility
- [ ] Route and tool validators consume derived `.zod` instead of duplicate Zod definitions
- [ ] New domain models default to Effect Schema first

## Notes

- Use `@/util/effect-zod` for all Schema → Zod conversion.
- Prefer one canonical schema definition. Avoid maintaining parallel Zod and Effect definitions for the same domain type.
- Keep the migration incremental. Converting the domain model first is more valuable than converting every boundary in the same change.
packages/opencode/specs/effect/tools.md: new file (96 lines)
# Tool migration

Practical reference for the current tool-migration state in `packages/opencode`.

## Status

`Tool.Def.execute` and `Tool.Info.init` already return `Effect` on this branch, and the built-in tool surface is now largely on the target shape.

The current exported tools in `src/tool` all use `Tool.define(...)` with Effect-based initialization, and nearly all of them already build their tool body with `Effect.gen(...)` and `Effect.fn(...)`.

So the remaining work is no longer "convert tools to Effect at all". The remaining work is mostly:

1. remove Promise and raw platform bridges inside individual tool bodies
2. swap tool internals to Effect-native services like `AppFileSystem`, `HttpClient`, and `ChildProcessSpawner`
3. keep tests and callers aligned with `yield* info.init()` and real service graphs

## Current shape

`Tool.define(...)` is already the Effect-native helper here.

- `init` is an `Effect`
- `info.init()` returns an `Effect`
- `execute(...)` returns an `Effect`

That means a tool does not need a separate `Tool.defineEffect(...)` helper to count as migrated. A tool is effectively migrated when its init and execute path stay Effect-native, even if some internals still bridge to Promise-based or raw APIs.
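As a rough illustration only, a trivial tool on that shape might look like the sketch below; `Tool.define`'s exact signature is not documented here, so the option names and the `execute` contract are assumptions:

```ts
export const EchoTool = Tool.define("echo", {
  description: "Echo the input back",
  parameters: Schema.Struct({ text: Schema.String }),
  // init is an Effect; yield* any services the tool needs and close over them.
  init: Effect.gen(function* () {
    return {
      // execute returns an Effect as well; no Promise bridge on the happy path.
      execute: (params: { text: string }) => Effect.succeed({ output: params.text }),
    }
  }),
})
```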
## Tests

Tool tests should use the existing Effect helpers in `packages/opencode/test/lib/effect.ts`:

- Use `testEffect(...)` / `it.live(...)` instead of creating fake local wrappers around effectful tools.
- Yield the real tool export, then initialize it: `const info = yield* ReadTool`, `const tool = yield* info.init()`.
- Run tests inside a real instance with `provideTmpdirInstance(...)` or `provideInstance(tmpdirScoped(...))` so instance-scoped services resolve exactly as they do in production.

This keeps tool tests aligned with the production service graph and makes follow-up cleanup mostly mechanical.
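Put together, a test on that shape could look roughly like this; `testEffect` and `provideTmpdirInstance` are the helpers named above, but their exact signatures are assumptions:

```ts
testEffect("read tool initializes inside a real instance", () =>
  Effect.gen(function* () {
    // Yield the real tool export, then initialize it.
    const info = yield* ReadTool
    const tool = yield* info.init()
    expect(tool.execute).toBeDefined()
  }).pipe(provideTmpdirInstance()),
)
```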
## Exported tools

These exported tool definitions already exist in `src/tool` and are on the current Effect-native `Tool.define(...)` path:

- [x] `apply_patch.ts`
- [x] `bash.ts`
- [x] `codesearch.ts`
- [x] `edit.ts`
- [x] `glob.ts`
- [x] `grep.ts`
- [x] `invalid.ts`
- [x] `ls.ts`
- [x] `lsp.ts`
- [x] `multiedit.ts`
- [x] `plan.ts`
- [x] `question.ts`
- [x] `read.ts`
- [x] `skill.ts`
- [x] `task.ts`
- [x] `todo.ts`
- [x] `webfetch.ts`
- [x] `websearch.ts`
- [x] `write.ts`

Notes:

- `batch.ts` is no longer a current tool file and should not be tracked here.
- `truncate.ts` is an Effect service used by tools, not a tool definition itself.
- `mcp-exa.ts`, `external-directory.ts`, and `schema.ts` are support modules, not standalone tool definitions.

## Follow-up cleanup

Most exported tools are already on the intended Effect-native shape. The remaining cleanup is narrower than the old checklist implied.

Current spot cleanups worth tracking:

- [ ] `read.ts` — still bridges to Node stream / `readline` helpers and Promise-based binary detection
- [ ] `bash.ts` — already uses Effect child-process primitives; only keep tracking shell-specific platform bridges and parser/loading details as they come up
- [ ] `webfetch.ts` — already uses `HttpClient`; remaining work is limited to smaller boundary helpers like HTML text extraction
- [ ] `file/ripgrep.ts` — adjacent to tool migration; still has raw fs/process usage that affects `grep.ts` and `ls.ts`
- [ ] `patch/index.ts` — adjacent to tool migration; still has raw fs usage behind patch application

Notable items that are already effectively on the target path and do not need separate migration bullets right now:

- `apply_patch.ts`
- `grep.ts`
- `write.ts`
- `codesearch.ts`
- `websearch.ts`
- `ls.ts`
- `multiedit.ts`
- `edit.ts`

## Filesystem notes

Current raw fs users that still appear relevant here:

- `tool/read.ts` — `fs.createReadStream`, `readline`
- `file/ripgrep.ts` — `fs/promises`
- `patch/index.ts` — `fs`, `fs/promises`
```diff
@@ -398,13 +398,11 @@ export namespace Agent {
     }),
   )

-  export const defaultLayer = Layer.suspend(() =>
-    layer.pipe(
-      Layer.provide(Provider.defaultLayer),
-      Layer.provide(Auth.defaultLayer),
-      Layer.provide(Config.defaultLayer),
-      Layer.provide(Skill.defaultLayer),
-    ),
+  export const defaultLayer = layer.pipe(
+    Layer.provide(Provider.defaultLayer),
+    Layer.provide(Auth.defaultLayer),
+    Layer.provide(Config.defaultLayer),
+    Layer.provide(Skill.defaultLayer),
   )

   const { runPromise } = makeRuntime(Service, defaultLayer)
```
```diff
@@ -1,10 +1,11 @@
+import { AppRuntime } from "@/effect/app-runtime"
 import { InstanceBootstrap } from "../project/bootstrap"
 import { Instance } from "../project/instance"

 export async function bootstrap<T>(directory: string, cb: () => Promise<T>) {
   return Instance.provide({
     directory,
-    init: InstanceBootstrap,
+    init: () => AppRuntime.runPromise(InstanceBootstrap),
     fn: async () => {
       try {
         const result = await cb()
```
```diff
@@ -1,4 +1,5 @@
 import { EOL } from "os"
+import { AppRuntime } from "../../../effect/app-runtime"
 import { Ripgrep } from "../../../file/ripgrep"
 import { Instance } from "../../../project/instance"
 import { bootstrap } from "../../bootstrap"
@@ -76,12 +77,18 @@ const SearchCommand = cmd({
     description: "Limit number of results",
   }),
   async handler(args) {
-    const results = await Ripgrep.search({
-      cwd: process.cwd(),
-      pattern: args.pattern,
-      glob: args.glob as string[] | undefined,
-      limit: args.limit,
-    })
-    process.stdout.write(JSON.stringify(results, null, 2) + EOL)
+    await bootstrap(process.cwd(), async () => {
+      const results = await AppRuntime.runPromise(
+        Ripgrep.Service.use((svc) =>
+          svc.search({
+            cwd: Instance.directory,
+            pattern: args.pattern,
+            glob: args.glob as string[] | undefined,
+            limit: args.limit,
+          }),
+        ),
+      )
+      process.stdout.write(JSON.stringify(results.items, null, 2) + EOL)
+    })
   },
 })
```
```diff
@@ -148,6 +148,12 @@ async function handlePluginAuth(plugin: { auth: PluginAuth }, provider: string,
   }

+  if (method.type === "api") {
+    const key = await prompts.password({
+      message: "Enter your API key",
+      validate: (x) => (x && x.length > 0 ? undefined : "Required"),
+    })
+    if (prompts.isCancel(key)) throw new UI.CancelledError()

   if (method.authorize) {
     const result = await method.authorize(inputs)
     if (result.type === "failed") {
@@ -157,7 +163,7 @@ async function handlePluginAuth(plugin: { auth: PluginAuth }, provider: string,
   const saveProvider = result.provider ?? provider
   await Auth.set(saveProvider, {
     type: "api",
-    key: result.key,
+    key: result.key ?? key,
   })
   prompts.log.success("Login successful")
 }
```
```diff
@@ -1,6 +1,7 @@
 import { render, TimeToFirstDraw, useKeyboard, useRenderer, useTerminalDimensions } from "@opentui/solid"
 import { Clipboard } from "@tui/util/clipboard"
 import { Selection } from "@tui/util/selection"
+import { Terminal } from "@tui/util/terminal"
 import { createCliRenderer, MouseButton, type CliRendererConfig } from "@opentui/core"
 import { RouteProvider, useRoute } from "@tui/context/route"
 import {
@@ -60,66 +61,6 @@ import { TuiConfig } from "@/config/tui"
 import { createTuiApi, TuiPluginRuntime, type RouteMap } from "./plugin"
 import { FormatError, FormatUnknownError } from "@/cli/error"

-async function getTerminalBackgroundColor(): Promise<"dark" | "light"> {
-  // can't set raw mode if not a TTY
-  if (!process.stdin.isTTY) return "dark"
-
-  return new Promise((resolve) => {
-    let timeout: NodeJS.Timeout
-
-    const cleanup = () => {
-      process.stdin.setRawMode(false)
-      process.stdin.removeListener("data", handler)
-      clearTimeout(timeout)
-    }
-
-    const handler = (data: Buffer) => {
-      const str = data.toString()
-      const match = str.match(/\x1b]11;([^\x07\x1b]+)/)
-      if (match) {
-        cleanup()
-        const color = match[1]
-        // Parse RGB values from color string
-        // Formats: rgb:RR/GG/BB or #RRGGBB or rgb(R,G,B)
-        let r = 0,
-          g = 0,
-          b = 0
-
-        if (color.startsWith("rgb:")) {
-          const parts = color.substring(4).split("/")
-          r = parseInt(parts[0], 16) >> 8 // Convert 16-bit to 8-bit
-          g = parseInt(parts[1], 16) >> 8 // Convert 16-bit to 8-bit
-          b = parseInt(parts[2], 16) >> 8 // Convert 16-bit to 8-bit
-        } else if (color.startsWith("#")) {
-          r = parseInt(color.substring(1, 3), 16)
-          g = parseInt(color.substring(3, 5), 16)
-          b = parseInt(color.substring(5, 7), 16)
-        } else if (color.startsWith("rgb(")) {
-          const parts = color.substring(4, color.length - 1).split(",")
-          r = parseInt(parts[0])
-          g = parseInt(parts[1])
-          b = parseInt(parts[2])
-        }
-
-        // Calculate luminance using relative luminance formula
-        const luminance = (0.299 * r + 0.587 * g + 0.114 * b) / 255
-
-        // Determine if dark or light based on luminance threshold
-        resolve(luminance > 0.5 ? "light" : "dark")
-      }
-    }
-
-    process.stdin.setRawMode(true)
-    process.stdin.on("data", handler)
-    process.stdout.write("\x1b]11;?\x07")
-
-    timeout = setTimeout(() => {
-      cleanup()
-      resolve("dark")
-    }, 1000)
-  })
-}
-
 import type { EventSource } from "./context/sdk"
 import { DialogVariant } from "./component/dialog-variant"

@@ -178,7 +119,7 @@ export function tui(input: {
   const unguard = win32InstallCtrlCGuard()
   win32DisableProcessedInput()

-  const mode = await getTerminalBackgroundColor()
+  const mode = await Terminal.getTerminalBackgroundColor()

   // Re-clear after getTerminalBackgroundColor() — setRawMode(false) restores
   // the original console mode which re-enables ENABLE_PROCESSED_INPUT.
```
```diff
@@ -2,6 +2,28 @@ import { RGBA } from "@opentui/core"

 export namespace Terminal {
   export type Colors = Awaited<ReturnType<typeof colors>>

+  function parse(color: string): RGBA | null {
+    if (color.startsWith("rgb:")) {
+      const parts = color.substring(4).split("/")
+      return RGBA.fromInts(parseInt(parts[0], 16) >> 8, parseInt(parts[1], 16) >> 8, parseInt(parts[2], 16) >> 8, 255)
+    }
+    if (color.startsWith("#")) {
+      return RGBA.fromHex(color)
+    }
+    if (color.startsWith("rgb(")) {
+      const parts = color.substring(4, color.length - 1).split(",")
+      return RGBA.fromInts(parseInt(parts[0]), parseInt(parts[1]), parseInt(parts[2]), 255)
+    }
+    return null
+  }
+
+  function mode(bg: RGBA | null): "dark" | "light" {
+    if (!bg) return "dark"
+    const luminance = (0.299 * bg.r + 0.587 * bg.g + 0.114 * bg.b) / 255
+    return luminance > 0.5 ? "light" : "dark"
+  }
+
   /**
    * Query terminal colors including background, foreground, and palette (0-15).
    * Uses OSC escape sequences to retrieve actual terminal color values.
@@ -31,46 +53,26 @@ export namespace Terminal {
       clearTimeout(timeout)
     }

-    const parseColor = (colorStr: string): RGBA | null => {
-      if (colorStr.startsWith("rgb:")) {
-        const parts = colorStr.substring(4).split("/")
-        return RGBA.fromInts(
-          parseInt(parts[0], 16) >> 8, // Convert 16-bit to 8-bit
-          parseInt(parts[1], 16) >> 8,
-          parseInt(parts[2], 16) >> 8,
-          255,
-        )
-      }
-      if (colorStr.startsWith("#")) {
-        return RGBA.fromHex(colorStr)
-      }
-      if (colorStr.startsWith("rgb(")) {
-        const parts = colorStr.substring(4, colorStr.length - 1).split(",")
-        return RGBA.fromInts(parseInt(parts[0]), parseInt(parts[1]), parseInt(parts[2]), 255)
-      }
-      return null
-    }
-
     const handler = (data: Buffer) => {
       const str = data.toString()

       // Match OSC 11 (background color)
       const bgMatch = str.match(/\x1b]11;([^\x07\x1b]+)/)
       if (bgMatch) {
-        background = parseColor(bgMatch[1])
+        background = parse(bgMatch[1])
       }

       // Match OSC 10 (foreground color)
       const fgMatch = str.match(/\x1b]10;([^\x07\x1b]+)/)
       if (fgMatch) {
-        foreground = parseColor(fgMatch[1])
+        foreground = parse(fgMatch[1])
       }

       // Match OSC 4 (palette colors)
       const paletteMatches = str.matchAll(/\x1b]4;(\d+);([^\x07\x1b]+)/g)
       for (const match of paletteMatches) {
         const index = parseInt(match[1])
-        const color = parseColor(match[2])
+        const color = parse(match[2])
         if (color) paletteColors[index] = color
       }
@@ -100,15 +102,36 @@ export namespace Terminal {
     })
   }

+  // Keep startup mode detection separate from `colors()`: the TUI boot path only
+  // needs OSC 11 and should resolve on the first background response instead of
+  // waiting on the full palette query used by system theme generation.
   export async function getTerminalBackgroundColor(): Promise<"dark" | "light"> {
-    const result = await colors()
-    if (!result.background) return "dark"
+    if (!process.stdin.isTTY) return "dark"

-    const { r, g, b } = result.background
-    // Calculate luminance using relative luminance formula
-    const luminance = (0.299 * r + 0.587 * g + 0.114 * b) / 255
+    return new Promise((resolve) => {
+      let timeout: NodeJS.Timeout

-    // Determine if dark or light based on luminance threshold
-    return luminance > 0.5 ? "light" : "dark"
+      const cleanup = () => {
+        process.stdin.setRawMode(false)
+        process.stdin.removeListener("data", handler)
+        clearTimeout(timeout)
+      }
+
+      const handler = (data: Buffer) => {
+        const match = data.toString().match(/\x1b]11;([^\x07\x1b]+)/)
+        if (!match) return
+        cleanup()
+        resolve(mode(parse(match[1])))
+      }
+
+      process.stdin.setRawMode(true)
+      process.stdin.on("data", handler)
+      process.stdout.write("\x1b]11;?\x07")
+
+      timeout = setTimeout(() => {
+        cleanup()
+        resolve("dark")
+      }, 1000)
+    })
   }
 }
```
```diff
@@ -7,10 +7,10 @@ import { Rpc } from "@/util/rpc"
 import { upgrade } from "@/cli/upgrade"
 import { Config } from "@/config/config"
 import { GlobalBus } from "@/bus/global"
 import type { GlobalEvent } from "@opencode-ai/sdk/v2"
 import { Flag } from "@/flag/flag"
 import { writeHeapSnapshot } from "node:v8"
 import { Heap } from "@/cli/heap"
+import { AppRuntime } from "@/effect/app-runtime"

 await Log.init({
   print: process.argv.includes("--print-logs"),
@@ -74,7 +74,7 @@ export const rpc = {
   async checkUpgrade(input: { directory: string }) {
     await Instance.provide({
       directory: input.directory,
-      init: InstanceBootstrap,
+      init: () => AppRuntime.runPromise(InstanceBootstrap),
       fn: async () => {
         await upgrade().catch(() => {})
       },
```
```diff
@@ -183,7 +183,9 @@ export namespace Command {
     }),
   )

-  export const defaultLayer = Layer.suspend(() =>
-    layer.pipe(Layer.provide(Config.defaultLayer), Layer.provide(MCP.defaultLayer), Layer.provide(Skill.defaultLayer)),
+  export const defaultLayer = layer.pipe(
+    Layer.provide(Config.defaultLayer),
+    Layer.provide(MCP.defaultLayer),
+    Layer.provide(Skill.defaultLayer),
   )
 }
```
@@ -22,21 +22,19 @@ import { Instance, type InstanceContext } from "../project/instance"
|
||||
import { LSPServer } from "../lsp/server"
|
||||
import { Installation } from "@/installation"
|
||||
import { ConfigMarkdown } from "./markdown"
|
||||
import { constants, existsSync } from "fs"
|
||||
import { existsSync } from "fs"
|
||||
import { Bus } from "@/bus"
|
||||
import { GlobalBus } from "@/bus/global"
|
||||
import { Event } from "../server/event"
|
||||
import { Glob } from "../util/glob"
|
||||
import { iife } from "@/util/iife"
|
||||
import { Account } from "@/account"
|
||||
import { isRecord } from "@/util/record"
|
||||
import { ConfigPaths } from "./paths"
|
||||
import { Filesystem } from "@/util/filesystem"
|
||||
import type { ConsoleState } from "./console-state"
|
||||
import { AppFileSystem } from "@/filesystem"
|
||||
import { InstanceState } from "@/effect/instance-state"
|
||||
import { makeRuntime } from "@/effect/run-service"
|
||||
import { Duration, Effect, Layer, Option, Context } from "effect"
|
||||
import { Context, Duration, Effect, Exit, Fiber, Layer, Option } from "effect"
|
||||
import { Flock } from "@/util/flock"
|
||||
import { isPathPluginSpec, parsePluginSpecifier, resolvePathPluginTarget } from "@/plugin/shared"
|
||||
import { Npm } from "@/npm"
|
||||
@@ -140,53 +138,11 @@ export namespace Config {
   }

   export type InstallInput = {
     signal?: AbortSignal
     waitTick?: (input: { dir: string; attempt: number; delay: number; waited: number }) => void | Promise<void>
   }

-  export async function installDependencies(dir: string, input?: InstallInput) {
-    if (!(await isWritable(dir))) return
-    await using _ = await Flock.acquire(`config-install:${Filesystem.resolve(dir)}`, {
-      signal: input?.signal,
-      onWait: (tick) =>
-        input?.waitTick?.({
-          dir,
-          attempt: tick.attempt,
-          delay: tick.delay,
-          waited: tick.waited,
-        }),
-    })
-    input?.signal?.throwIfAborted()
-
-    const pkg = path.join(dir, "package.json")
-    const target = Installation.isLocal() ? "*" : Installation.VERSION
-    const json = await Filesystem.readJson<{ dependencies?: Record<string, string> }>(pkg).catch(() => ({
-      dependencies: {},
-    }))
-    json.dependencies = {
-      ...json.dependencies,
-      "@opencode-ai/plugin": target,
-    }
-    await Filesystem.writeJson(pkg, json)
-
-    const gitignore = path.join(dir, ".gitignore")
-    const ignore = await Filesystem.exists(gitignore)
-    if (!ignore) {
-      await Filesystem.write(
-        gitignore,
-        ["node_modules", "package.json", "package-lock.json", "bun.lock", ".gitignore"].join("\n"),
-      )
-    }
-    await Npm.install(dir)
-  }
-
-  async function isWritable(dir: string) {
-    try {
-      await fsNode.access(dir, constants.W_OK)
-      return true
-    } catch {
-      return false
-    }
-  }
+  type Package = {
+    dependencies?: Record<string, string>
+  }

   function rel(item: string, patterns: string[]) {
@@ -1111,7 +1067,7 @@ export namespace Config {
   type State = {
     config: Info
     directories: string[]
-    deps: Promise<void>[]
+    deps: Fiber.Fiber<void, never>[]
    consoleState: ConsoleState
  }
@@ -1119,6 +1075,7 @@ export namespace Config {
     readonly get: () => Effect.Effect<Info>
     readonly getGlobal: () => Effect.Effect<Info>
     readonly getConsoleState: () => Effect.Effect<ConsoleState>
+    readonly installDependencies: (dir: string, input?: InstallInput) => Effect.Effect<void, AppFileSystem.Error>
     readonly update: (config: Info) => Effect.Effect<void>
     readonly updateGlobal: (config: Info) => Effect.Effect<Info>
     readonly invalidate: (wait?: boolean) => Effect.Effect<void>
@@ -1320,6 +1277,74 @@ export namespace Config {
       return yield* cachedGlobal
     })

+    const install = Effect.fnUntraced(function* (dir: string) {
+      const pkg = path.join(dir, "package.json")
+      const gitignore = path.join(dir, ".gitignore")
+      const plugin = path.join(dir, "node_modules", "@opencode-ai", "plugin", "package.json")
+      const target = Installation.isLocal() ? "*" : Installation.VERSION
+      const json = yield* fs.readJson(pkg).pipe(
+        Effect.catch(() => Effect.succeed({} satisfies Package)),
+        Effect.map((x): Package => (isRecord(x) ? (x as Package) : {})),
+      )
+      const hasDep = json.dependencies?.["@opencode-ai/plugin"] === target
+      const hasIgnore = yield* fs.existsSafe(gitignore)
+      const hasPkg = yield* fs.existsSafe(plugin)
+
+      if (!hasDep) {
+        yield* fs.writeJson(pkg, {
+          ...json,
+          dependencies: {
+            ...json.dependencies,
+            "@opencode-ai/plugin": target,
+          },
+        })
+      }
+
+      if (!hasIgnore) {
+        yield* fs.writeFileString(
+          gitignore,
+          ["node_modules", "package.json", "package-lock.json", "bun.lock", ".gitignore"].join("\n"),
+        )
+      }
+
+      if (hasDep && hasIgnore && hasPkg) return
+
+      yield* Effect.promise(() => Npm.install(dir))
+    })
+
+    const installDependencies = Effect.fn("Config.installDependencies")(function* (
+      dir: string,
+      input?: InstallInput,
+    ) {
+      if (
+        !(yield* fs.access(dir, { writable: true }).pipe(
+          Effect.as(true),
+          Effect.orElseSucceed(() => false),
+        ))
+      )
+        return
+
+      const key =
+        process.platform === "win32" ? "config-install:win32" : `config-install:${AppFileSystem.resolve(dir)}`
+
+      yield* Effect.acquireUseRelease(
+        Effect.promise((signal) =>
+          Flock.acquire(key, {
+            signal,
+            onWait: (tick) =>
+              input?.waitTick?.({
+                dir,
+                attempt: tick.attempt,
+                delay: tick.delay,
+                waited: tick.waited,
+              }),
+          }),
+        ),
+        () => install(dir),
+        (lease) => Effect.promise(() => lease.release()),
+      )
+    })
+
     const loadInstanceState = Effect.fnUntraced(function* (ctx: InstanceContext) {
       const auth = yield* authSvc.all().pipe(Effect.orDie)
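// A minimal sketch of the acquire/use/release shape Config.installDependencies
// now uses. The in-memory lock below is only a stand-in for the cross-process
// Flock lease, so the helper names here are illustrative, not the repo's API.
import { Effect } from "effect"

const locks = new Map<string, Promise<void>>()
async function acquireLock(key: string) {
  while (locks.has(key)) await locks.get(key)
  let release!: () => void
  locks.set(key, new Promise<void>((resolve) => (release = resolve)))
  return {
    release: async () => {
      locks.delete(key)
      release()
    },
  }
}

// Effect.acquireUseRelease guarantees the release step runs whether the body
// succeeds, fails, or is interrupted: the Effect analogue of try/finally.
export const withLock = <A, E>(key: string, body: Effect.Effect<A, E>) =>
  Effect.acquireUseRelease(
    Effect.promise(() => acquireLock(key)),
    () => body,
    (lease) => Effect.promise(() => lease.release()),
  )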
@@ -1402,7 +1427,7 @@ export namespace Config {
         log.debug("loading config from OPENCODE_CONFIG_DIR", { path: Flag.OPENCODE_CONFIG_DIR })
       }

-      const deps: Promise<void>[] = []
+      const deps: Fiber.Fiber<void, never>[] = []

       for (const dir of unique(directories)) {
         if (dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR) {
@@ -1416,12 +1441,18 @@ export namespace Config {
           }
         }

-        const dep = iife(async () => {
-          await installDependencies(dir)
-        })
-        void dep.catch((err) => {
-          log.warn("background dependency install failed", { dir, error: err })
-        })
+        const dep = yield* installDependencies(dir).pipe(
+          Effect.exit,
+          Effect.tap((exit) =>
+            Exit.isFailure(exit)
+              ? Effect.sync(() => {
+                  log.warn("background dependency install failed", { dir, error: String(exit.cause) })
+                })
+              : Effect.void,
+          ),
+          Effect.asVoid,
+          Effect.forkScoped,
+        )
         deps.push(dep)

         result.command = mergeDeep(result.command ?? {}, yield* Effect.promise(() => loadCommand(dir)))
@@ -1558,7 +1589,9 @@ export namespace Config {
     })

     const waitForDependencies = Effect.fn("Config.waitForDependencies")(function* () {
-      yield* InstanceState.useEffect(state, (s) => Effect.promise(() => Promise.all(s.deps).then(() => undefined)))
+      yield* InstanceState.useEffect(state, (s) =>
+        Effect.forEach(s.deps, Fiber.join, { concurrency: "unbounded" }).pipe(Effect.asVoid),
+      )
     })

     const update = Effect.fn("Config.update")(function* (config: Info) {
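// A sketch of the fork-then-join shape the deps change introduces: background
// installs run as scoped fibers instead of floating promises, and waiters join
// them. Assumes only the public Effect API; the logged task is a placeholder.
import { Effect, Fiber } from "effect"

const program = Effect.gen(function* () {
  const deps: Fiber.Fiber<void, never>[] = []
  for (const dir of ["a", "b"]) {
    const fiber = yield* Effect.log(`install ${dir}`).pipe(Effect.forkScoped)
    deps.push(fiber)
  }
  // Later, a waiter can block until every background task settles:
  yield* Effect.forEach(deps, Fiber.join, { concurrency: "unbounded" })
}).pipe(Effect.scoped)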
@@ -1613,6 +1646,7 @@ export namespace Config {
       get,
       getGlobal,
       getConsoleState,
+      installDependencies,
       update,
       updateGlobal,
       invalidate,
@@ -1642,6 +1676,10 @@ export namespace Config {
     return runPromise((svc) => svc.getConsoleState())
   }

+  export async function installDependencies(dir: string, input?: InstallInput) {
+    return runPromise((svc) => svc.installDependencies(dir, input))
+  }
+
   export async function update(config: Info) {
     return runPromise((svc) => svc.update(config))
   }
@@ -49,7 +49,7 @@ import { ShareNext } from "@/share/share-next"
 import { SessionShare } from "@/share/session"

 export const AppLayer = Layer.mergeAll(
-  Observability.layer,
+  // Observability.layer,
   AppFileSystem.defaultLayer,
   Bus.defaultLayer,
   Auth.defaultLayer,
@@ -95,6 +95,6 @@ export const AppLayer = Layer.mergeAll(
   Installation.defaultLayer,
   ShareNext.defaultLayer,
   SessionShare.defaultLayer,
-)
+).pipe(Layer.provide(Observability.layer))

 export const AppRuntime = ManagedRuntime.make(AppLayer, { memoMap })
@@ -1,10 +1,27 @@
 import { Layer, ManagedRuntime } from "effect"
 import { memoMap } from "./run-service"

+import { Plugin } from "@/plugin"
+import { LSP } from "@/lsp"
 import { FileWatcher } from "@/file/watcher"
 import { Format } from "@/format"
 import { ShareNext } from "@/share/share-next"
+import { File } from "@/file"
+import { Vcs } from "@/project/vcs"
+import { Snapshot } from "@/snapshot"
+import { Bus } from "@/bus"
+import { Observability } from "./oltp"

-export const BootstrapLayer = Layer.mergeAll(Format.defaultLayer, ShareNext.defaultLayer, FileWatcher.defaultLayer)
+export const BootstrapLayer = Layer.mergeAll(
+  Plugin.defaultLayer,
+  ShareNext.defaultLayer,
+  Format.defaultLayer,
+  LSP.defaultLayer,
+  File.defaultLayer,
+  FileWatcher.defaultLayer,
+  Vcs.defaultLayer,
+  Snapshot.defaultLayer,
+  Bus.defaultLayer,
+).pipe(Layer.provide(Observability.layer))

 export const BootstrapRuntime = ManagedRuntime.make(BootstrapLayer, { memoMap })
@@ -96,6 +96,7 @@ export namespace Ripgrep {

   export type Result = z.infer<typeof Result>
   export type Match = z.infer<typeof Match>
+  export type Item = Match["data"]
   export type Begin = z.infer<typeof Begin>
   export type End = z.infer<typeof End>
   export type Summary = z.infer<typeof Summary>
@@ -289,6 +290,13 @@ export namespace Ripgrep {
       follow?: boolean
       maxDepth?: number
     }) => Stream.Stream<string, PlatformError>
+    readonly search: (input: {
+      cwd: string
+      pattern: string
+      glob?: string[]
+      limit?: number
+      follow?: boolean
+    }) => Effect.Effect<{ items: Item[]; partial: boolean }, PlatformError | Error>
   }

   export class Service extends Context.Service<Service, Interface>()("@opencode/Ripgrep") {}
@@ -298,6 +306,32 @@ export namespace Ripgrep {
     Effect.gen(function* () {
       const spawner = yield* ChildProcessSpawner
       const afs = yield* AppFileSystem.Service
+      const bin = Effect.fn("Ripgrep.path")(function* () {
+        return yield* Effect.promise(() => filepath())
+      })
+      const args = Effect.fn("Ripgrep.args")(function* (input: {
+        mode: "files" | "search"
+        glob?: string[]
+        hidden?: boolean
+        follow?: boolean
+        maxDepth?: number
+        limit?: number
+        pattern?: string
+      }) {
+        const out = [yield* bin(), input.mode === "search" ? "--json" : "--files", "--glob=!.git/*"]
+        if (input.follow) out.push("--follow")
+        if (input.hidden !== false) out.push("--hidden")
+        if (input.maxDepth !== undefined) out.push(`--max-depth=${input.maxDepth}`)
+        if (input.glob) {
+          for (const g of input.glob) {
+            out.push(`--glob=${g}`)
+          }
+        }
+        if (input.limit) out.push(`--max-count=${input.limit}`)
+        if (input.mode === "search") out.push("--no-messages")
+        if (input.pattern) out.push("--", input.pattern)
+        return out
+      })

       const files = Effect.fn("Ripgrep.files")(function* (input: {
         cwd: string
@@ -306,7 +340,7 @@ export namespace Ripgrep {
         follow?: boolean
         maxDepth?: number
       }) {
-        const rgPath = yield* Effect.promise(() => filepath())
+        const rgPath = yield* bin()
         const isDir = yield* afs.isDir(input.cwd)
         if (!isDir) {
           return yield* Effect.die(
@@ -318,23 +352,76 @@ export namespace Ripgrep {
           )
         }

-        const args = [rgPath, "--files", "--glob=!.git/*"]
-        if (input.follow) args.push("--follow")
-        if (input.hidden !== false) args.push("--hidden")
-        if (input.maxDepth !== undefined) args.push(`--max-depth=${input.maxDepth}`)
-        if (input.glob) {
-          for (const g of input.glob) {
-            args.push(`--glob=${g}`)
-          }
-        }
+        const cmd = yield* args({
+          mode: "files",
+          glob: input.glob,
+          hidden: input.hidden,
+          follow: input.follow,
+          maxDepth: input.maxDepth,
+        })

         return spawner
-          .streamLines(ChildProcess.make(args[0], args.slice(1), { cwd: input.cwd }))
+          .streamLines(ChildProcess.make(cmd[0], cmd.slice(1), { cwd: input.cwd }))
           .pipe(Stream.filter((line: string) => line.length > 0))
       })

+      const search = Effect.fn("Ripgrep.search")(function* (input: {
+        cwd: string
+        pattern: string
+        glob?: string[]
+        limit?: number
+        follow?: boolean
+      }) {
+        return yield* Effect.scoped(
+          Effect.gen(function* () {
+            const cmd = yield* args({
+              mode: "search",
+              glob: input.glob,
+              follow: input.follow,
+              limit: input.limit,
+              pattern: input.pattern,
+            })
+
+            const handle = yield* spawner.spawn(
+              ChildProcess.make(cmd[0], cmd.slice(1), {
+                cwd: input.cwd,
+                stdin: "ignore",
+              }),
+            )
+
+            const [stdout, stderr, code] = yield* Effect.all(
+              [
+                Stream.mkString(Stream.decodeText(handle.stdout)),
+                Stream.mkString(Stream.decodeText(handle.stderr)),
+                handle.exitCode,
+              ],
+              { concurrency: "unbounded" },
+            )
+
+            if (code !== 0 && code !== 1 && code !== 2) {
+              return yield* Effect.fail(new Error(`ripgrep failed: ${stderr}`))
+            }
+
+            const items = stdout
+              .trim()
+              .split(/\r?\n/)
+              .filter(Boolean)
+              .map((line) => JSON.parse(line))
+              .map((parsed) => Result.parse(parsed))
+              .filter((row): row is Match => row.type === "match")
+              .map((row) => row.data)
+
+            return {
+              items,
+              partial: code === 2,
+            }
+          }),
+        )
+      })

       return Service.of({
         files: (input) => Stream.unwrap(files(input)),
+        search,
       })
     }),
   )
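// The exit-code handling above follows ripgrep's documented convention:
// 0 = matches found, 1 = no matches, 2 = error (possibly with partial
// results, e.g. unreadable paths). A plain-Node sketch of the same policy,
// assuming an `rg` binary on PATH:
import { spawnSync } from "node:child_process"

export function rgSearch(cwd: string, pattern: string) {
  const proc = spawnSync("rg", ["--json", "--no-messages", "--", pattern], { cwd, encoding: "utf8" })
  if (proc.status !== 0 && proc.status !== 1 && proc.status !== 2) {
    throw new Error(`ripgrep failed: ${proc.stderr}`)
  }
  const items = proc.stdout
    .trim()
    .split(/\r?\n/)
    .filter(Boolean)
    .map((line) => JSON.parse(line))
    .filter((row) => row.type === "match")
    .map((row) => row.data)
  // code 2 with usable output means some paths were skipped, not a failure
  return { items, partial: proc.status === 2 }
}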
@@ -401,46 +488,4 @@ export namespace Ripgrep {

     return lines.join("\n")
   }
-
-  export async function search(input: {
-    cwd: string
-    pattern: string
-    glob?: string[]
-    limit?: number
-    follow?: boolean
-  }) {
-    const args = [`${await filepath()}`, "--json", "--hidden", "--glob=!.git/*"]
-    if (input.follow) args.push("--follow")
-
-    if (input.glob) {
-      for (const g of input.glob) {
-        args.push(`--glob=${g}`)
-      }
-    }
-
-    if (input.limit) {
-      args.push(`--max-count=${input.limit}`)
-    }
-
-    args.push("--")
-    args.push(input.pattern)
-
-    const result = await Process.text(args, {
-      cwd: input.cwd,
-      nothrow: true,
-    })
-    if (result.code !== 0) {
-      return []
-    }
-
-    // Handle both Unix (\n) and Windows (\r\n) line endings
-    const lines = result.text.trim().split(/\r?\n/).filter(Boolean)
-    // Parse JSON lines from ripgrep output
-
-    return lines
-      .map((line) => JSON.parse(line))
-      .map((parsed) => Result.parse(parsed))
-      .filter((r) => r.type === "match")
-      .map((r) => r.data)
-  }
 }
@@ -1,4 +1,3 @@
-import { text } from "node:stream/consumers"
 import { Npm } from "@/npm"
 import { Instance } from "../project/instance"
 import { Filesystem } from "../util/filesystem"
@@ -217,26 +216,16 @@ export const rlang: Info = {
   name: "air",
   extensions: [".R"],
   async enabled() {
-    const airPath = which("air")
-    if (airPath == null) return false
+    const air = which("air")
+    if (air == null) return false

-    try {
-      const proc = Process.spawn(["air", "--help"], {
-        stdout: "pipe",
-        stderr: "pipe",
-      })
-      await proc.exited
-      if (!proc.stdout) return false
-      const output = await text(proc.stdout)
+    const output = await Process.text([air, "--help"], { nothrow: true })

-      // Check for "Air: An R language server and formatter"
-      const firstLine = output.split("\n")[0]
-      const hasR = firstLine.includes("R language")
-      const hasFormatter = firstLine.includes("formatter")
-      if (hasR && hasFormatter) return ["air", "format", "$FILE"]
-    } catch {
-      return false
-    }
+    // Check for "Air: An R language server and formatter"
+    const firstLine = output.text.split("\n")[0]
+    const hasR = firstLine.includes("R language")
+    const hasFormatter = firstLine.includes("formatter")
+    if (output.code === 0 && hasR && hasFormatter) return [air, "format", "$FILE"]
     return false
   },
 }
@@ -246,11 +235,10 @@ export const uvformat: Info = {
   extensions: [".py", ".pyi"],
   async enabled() {
     if (await ruff.enabled()) return false
-    if (which("uv") !== null) {
-      const proc = Process.spawn(["uv", "format", "--help"], { stderr: "pipe", stdout: "pipe" })
-      const code = await proc.exited
-      if (code === 0) return ["uv", "format", "--", "$FILE"]
-    }
+    const uv = which("uv")
+    if (uv == null) return false
+    const output = await Process.run([uv, "format", "--help"], { nothrow: true })
+    if (output.code === 0) return [uv, "format", "--", "$FILE"]
     return false
   },
 }
@@ -5,6 +5,7 @@ import { iife } from "@/util/iife"
 import { Log } from "../../util/log"
 import { setTimeout as sleep } from "node:timers/promises"
 import { CopilotModels } from "./models"
+import { MessageV2 } from "@/session/message-v2"

 const log = Log.create({ service: "plugin.copilot" })
@@ -27,6 +28,21 @@ function base(enterpriseUrl?: string) {
   return enterpriseUrl ? `https://copilot-api.${normalizeDomain(enterpriseUrl)}` : "https://api.githubcopilot.com"
 }

+// Check if a message is a synthetic user msg used to attach an image from a tool call
+function imgMsg(msg: any): boolean {
+  if (msg?.role !== "user") return false
+
+  // Handle the 3 api formats
+
+  const content = msg.content
+  if (typeof content === "string") return content === MessageV2.SYNTHETIC_ATTACHMENT_PROMPT
+  if (!Array.isArray(content)) return false
+  return content.some(
+    (part: any) =>
+      (part?.type === "text" || part?.type === "input_text") && part.text === MessageV2.SYNTHETIC_ATTACHMENT_PROMPT,
+  )
+}
+
 function fix(model: Model, url: string): Model {
   return {
     ...model,
@@ -90,7 +106,7 @@ export async function CopilotAuthPlugin(input: PluginInput): Promise<Hooks> {
             (msg: any) =>
               Array.isArray(msg.content) && msg.content.some((part: any) => part.type === "image_url"),
           ),
-          isAgent: last?.role !== "user",
+          isAgent: last?.role !== "user" || imgMsg(last),
         }
       }
@@ -102,7 +118,7 @@ export async function CopilotAuthPlugin(input: PluginInput): Promise<Hooks> {
             (item: any) =>
               Array.isArray(item?.content) && item.content.some((part: any) => part.type === "input_image"),
           ),
-          isAgent: last?.role !== "user",
+          isAgent: last?.role !== "user" || imgMsg(last),
         }
       }
@@ -124,7 +140,7 @@ export async function CopilotAuthPlugin(input: PluginInput): Promise<Hooks> {
                 part.content.some((nested: any) => nested?.type === "image")),
             ),
           ),
-          isAgent: !(last?.role === "user" && hasNonToolCalls),
+          isAgent: !(last?.role === "user" && hasNonToolCalls) || imgMsg(last),
         }
       }
     } catch {}
@@ -9,24 +9,26 @@ import { Bus } from "../bus"
 import { Command } from "../command"
 import { Instance } from "./instance"
 import { Log } from "@/util/log"
-import { BootstrapRuntime } from "@/effect/bootstrap-runtime"
 import { FileWatcher } from "@/file/watcher"
 import { ShareNext } from "@/share/share-next"
+import * as Effect from "effect/Effect"

-export async function InstanceBootstrap() {
+export const InstanceBootstrap = Effect.gen(function* () {
   Log.Default.info("bootstrapping", { directory: Instance.directory })
-  await Plugin.init()
-  void BootstrapRuntime.runPromise(ShareNext.Service.use((svc) => svc.init()))
-  void BootstrapRuntime.runPromise(Format.Service.use((svc) => svc.init()))
-  await LSP.init()
-  File.init()
-  void BootstrapRuntime.runPromise(FileWatcher.Service.use((svc) => svc.init()))
-  Vcs.init()
-  Snapshot.init()
+  yield* Plugin.Service.use((svc) => svc.init())
+  yield* ShareNext.Service.use((svc) => svc.init()).pipe(Effect.forkDetach)
+  yield* Format.Service.use((svc) => svc.init()).pipe(Effect.forkDetach)
+  yield* LSP.Service.use((svc) => svc.init())
+  yield* File.Service.use((svc) => svc.init()).pipe(Effect.forkDetach)
+  yield* FileWatcher.Service.use((svc) => svc.init()).pipe(Effect.forkDetach)
+  yield* Vcs.Service.use((svc) => svc.init()).pipe(Effect.forkDetach)
+  yield* Snapshot.Service.use((svc) => svc.init()).pipe(Effect.forkDetach)

-  Bus.subscribe(Command.Event.Executed, async (payload) => {
-    if (payload.properties.name === Command.Default.INIT) {
-      Project.setInitialized(Instance.project.id)
-    }
-  })
-}
+  yield* Bus.Service.use((svc) =>
+    svc.subscribeCallback(Command.Event.Executed, async (payload) => {
+      if (payload.properties.name === Command.Default.INIT) {
+        Project.setInitialized(Instance.project.id)
+      }
+    }),
+  )
+}).pipe(Effect.withSpan("InstanceBootstrap"))
@@ -1,2 +0,0 @@
-// Auto-generated by build.ts - do not edit
-export declare const snapshot: Record<string, unknown>
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -209,6 +209,9 @@ export namespace ProviderTransform {
     copilot: {
       copilot_cache_control: { type: "ephemeral" },
     },
+    alibaba: {
+      cacheControl: { type: "ephemeral" },
+    },
   }

   for (const msg of unique([...system, ...final])) {
@@ -285,7 +288,8 @@ export namespace ProviderTransform {
       model.api.id.includes("claude") ||
       model.id.includes("anthropic") ||
       model.id.includes("claude") ||
-      model.api.npm === "@ai-sdk/anthropic") &&
+      model.api.npm === "@ai-sdk/anthropic" ||
+      model.api.npm === "@ai-sdk/alibaba") &&
     model.api.npm !== "@ai-sdk/gateway"
   ) {
     msgs = applyCaching(msgs, model)
@@ -1,6 +1,7 @@
 import { Hono } from "hono"
 import { describeRoute, validator, resolver } from "hono-openapi"
 import z from "zod"
+import { AppRuntime } from "../../effect/app-runtime"
 import { File } from "../../file"
 import { Ripgrep } from "../../file/ripgrep"
 import { LSP } from "../../lsp"
@@ -34,12 +35,10 @@ export const FileRoutes = lazy(() =>
       ),
       async (c) => {
         const pattern = c.req.valid("query").pattern
-        const result = await Ripgrep.search({
-          cwd: Instance.directory,
-          pattern,
-          limit: 10,
-        })
-        return c.json(result)
+        const result = await AppRuntime.runPromise(
+          Ripgrep.Service.use((svc) => svc.search({ cwd: Instance.directory, pattern, limit: 10 })),
+        )
+        return c.json(result.items)
       },
     )
     .get(
@@ -10,6 +10,7 @@ import { InstanceBootstrap } from "@/project/bootstrap"
 import { Session } from "@/session"
 import { SessionID } from "@/session/schema"
 import { WorkspaceContext } from "@/control-plane/workspace-context"
+import { AppRuntime } from "@/effect/app-runtime"

 type Rule = { method?: string; path: string; exact?: boolean; action: "local" | "forward" }
@@ -66,7 +67,7 @@ export function WorkspaceRouterMiddleware(upgrade: UpgradeWebSocket): Middleware
     if (!workspaceID) {
       return Instance.provide({
         directory,
-        init: InstanceBootstrap,
+        init: () => AppRuntime.runPromise(InstanceBootstrap),
         async fn() {
           return next()
         },
@@ -103,7 +104,7 @@ export function WorkspaceRouterMiddleware(upgrade: UpgradeWebSocket): Middleware
       fn: () =>
         Instance.provide({
           directory: target.directory,
-          init: InstanceBootstrap,
+          init: () => AppRuntime.runPromise(InstanceBootstrap),
           async fn() {
             return next()
           },
@@ -8,6 +8,7 @@ import { ProjectID } from "../../project/schema"
 import { errors } from "../error"
 import { lazy } from "../../util/lazy"
 import { InstanceBootstrap } from "../../project/bootstrap"
+import { AppRuntime } from "@/effect/app-runtime"

 export const ProjectRoutes = lazy(() =>
   new Hono()
@@ -83,7 +84,7 @@ export const ProjectRoutes = lazy(() =>
           directory: dir,
           worktree: dir,
           project: next,
-          init: InstanceBootstrap,
+          init: () => AppRuntime.runPromise(InstanceBootstrap),
         })
         return c.json(next)
       },
@@ -4,7 +4,7 @@ import { BusEvent } from "@/bus/bus-event"
 import { Bus } from "@/bus"
 import { Decimal } from "decimal.js"
 import z from "zod"
-import { type ProviderMetadata } from "ai"
+import { type ProviderMetadata, type LanguageModelUsage } from "ai"
 import { Flag } from "../flag/flag"
 import { Installation } from "../installation"
@@ -28,7 +28,6 @@ import { SessionID, MessageID, PartID } from "./schema"
 import type { Provider } from "@/provider/provider"
 import { Permission } from "@/permission"
 import { Global } from "@/global"
-import type { LanguageModelV2Usage } from "@ai-sdk/provider"
 import { Effect, Layer, Option, Context } from "effect"
 import { makeRuntime } from "@/effect/run-service"
@@ -240,7 +239,7 @@ export namespace Session {

   export const getUsage = (input: {
     model: Provider.Model
-    usage: LanguageModelV2Usage
+    usage: LanguageModelUsage
     metadata?: ProviderMetadata
   }) => {
     const safe = (value: number) => {
@@ -249,11 +248,14 @@ export namespace Session {
     }
     const inputTokens = safe(input.usage.inputTokens ?? 0)
     const outputTokens = safe(input.usage.outputTokens ?? 0)
-    const reasoningTokens = safe(input.usage.reasoningTokens ?? 0)
+    const reasoningTokens = safe(input.usage.outputTokenDetails?.reasoningTokens ?? input.usage.reasoningTokens ?? 0)

-    const cacheReadInputTokens = safe(input.usage.cachedInputTokens ?? 0)
+    const cacheReadInputTokens = safe(
+      input.usage.inputTokenDetails?.cacheReadTokens ?? input.usage.cachedInputTokens ?? 0,
+    )
     const cacheWriteInputTokens = safe(
-      (input.metadata?.["anthropic"]?.["cacheCreationInputTokens"] ??
+      (input.usage.inputTokenDetails?.cacheWriteTokens ??
+        input.metadata?.["anthropic"]?.["cacheCreationInputTokens"] ??
         // google-vertex-anthropic returns metadata under "vertex" key
         // (AnthropicMessagesLanguageModel custom provider key from 'vertex.anthropic.messages')
         input.metadata?.["vertex"]?.["cacheCreationInputTokens"] ??
@@ -274,7 +276,7 @@ export namespace Session {
     const tokens = {
       total,
       input: adjustedInputTokens,
-      output: outputTokens - reasoningTokens,
+      output: safe(outputTokens - reasoningTokens),
       reasoning: reasoningTokens,
       cache: {
         write: cacheWriteInputTokens,
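// A sketch of the fallback order getUsage now uses: prefer the structured
// *TokenDetails fields from the AI SDK's LanguageModelUsage, fall back to the
// legacy flat fields, and clamp anything non-finite or negative to zero. The
// Usage type and the exact clamp below are simplified stand-ins.
type Usage = {
  inputTokens?: number
  outputTokens?: number
  reasoningTokens?: number
  cachedInputTokens?: number
  outputTokenDetails?: { reasoningTokens?: number }
  inputTokenDetails?: { cacheReadTokens?: number; cacheWriteTokens?: number }
}

const clamp = (value: number) => (Number.isFinite(value) && value >= 0 ? value : 0)

export function tokens(usage: Usage) {
  const output = clamp(usage.outputTokens ?? 0)
  const reasoning = clamp(usage.outputTokenDetails?.reasoningTokens ?? usage.reasoningTokens ?? 0)
  return {
    input: clamp(usage.inputTokens ?? 0),
    // reasoning tokens are reported inside outputTokens, so subtract them out
    output: clamp(output - reasoning),
    reasoning,
    cacheRead: clamp(usage.inputTokenDetails?.cacheReadTokens ?? usage.cachedInputTokens ?? 0),
    cacheWrite: clamp(usage.inputTokenDetails?.cacheWriteTokens ?? 0),
  }
}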
@@ -678,9 +680,7 @@ export namespace Session {
     }),
   )

-  export const defaultLayer = Layer.suspend(() =>
-    layer.pipe(Layer.provide(Bus.layer), Layer.provide(Storage.defaultLayer)),
-  )
+  export const defaultLayer = layer.pipe(Layer.provide(Bus.layer), Layer.provide(Storage.defaultLayer))

   const { runPromise } = makeRuntime(Service, defaultLayer)
@@ -25,6 +25,8 @@ interface FetchDecompressionError extends Error {
 }

 export namespace MessageV2 {
+  export const SYNTHETIC_ATTACHMENT_PROMPT = "Attached image(s) from tool result:"
+
   export function isMedia(mime: string) {
     return mime.startsWith("image/") || mime === "application/pdf"
   }
@@ -808,7 +810,7 @@ export namespace MessageV2 {
         parts: [
           {
             type: "text" as const,
-            text: "Attached image(s) from tool result:",
+            text: SYNTHETIC_ATTACHMENT_PROMPT,
           },
           ...media.map((attachment) => ({
             type: "file" as const,
@@ -339,14 +339,12 @@ export namespace ShareNext {
     }),
   )

-  export const defaultLayer = Layer.suspend(() =>
-    layer.pipe(
-      Layer.provide(Bus.layer),
-      Layer.provide(Account.defaultLayer),
-      Layer.provide(Config.defaultLayer),
-      Layer.provide(FetchHttpClient.layer),
-      Layer.provide(Provider.defaultLayer),
-      Layer.provide(Session.defaultLayer),
-    ),
-  )
+  export const defaultLayer = layer.pipe(
+    Layer.provide(Bus.layer),
+    Layer.provide(Account.defaultLayer),
+    Layer.provide(Config.defaultLayer),
+    Layer.provide(FetchHttpClient.layer),
+    Layer.provide(Provider.defaultLayer),
+    Layer.provide(Session.defaultLayer),
+  )
 }
@@ -230,13 +230,11 @@ export namespace Skill {
     }),
   )

-  export const defaultLayer = Layer.suspend(() =>
-    layer.pipe(
-      Layer.provide(Discovery.defaultLayer),
-      Layer.provide(Config.defaultLayer),
-      Layer.provide(Bus.layer),
-      Layer.provide(AppFileSystem.defaultLayer),
-    ),
-  )
+  export const defaultLayer = layer.pipe(
+    Layer.provide(Discovery.defaultLayer),
+    Layer.provide(Config.defaultLayer),
+    Layer.provide(Bus.layer),
+    Layer.provide(AppFileSystem.defaultLayer),
+  )

   export function fmt(list: Info[], opts: { verbose: boolean }) {
@@ -177,8 +177,39 @@ export namespace Snapshot {
         const all = Array.from(new Set([...tracked, ...untracked]))
         if (!all.length) return

+        // Filter out files that are now gitignored even if previously tracked
+        // Files may have been tracked before being gitignored, so we need to check
+        // against the source project's current gitignore rules
+        // Use --no-index to check purely against patterns (ignoring whether file is tracked)
+        const checkArgs = [
+          ...quote,
+          "--git-dir",
+          path.join(state.worktree, ".git"),
+          "--work-tree",
+          state.worktree,
+          "check-ignore",
+          "--no-index",
+          "--",
+          ...all,
+        ]
+        const check = yield* git(checkArgs, { cwd: state.directory })
+        const ignored =
+          check.code === 0 ? new Set(check.text.trim().split("\n").filter(Boolean)) : new Set<string>()
+        const filtered = all.filter((item) => !ignored.has(item))
+
+        // Remove newly-ignored files from snapshot index to prevent re-adding
+        if (ignored.size > 0) {
+          const ignoredFiles = Array.from(ignored)
+          log.info("removing gitignored files from snapshot", { count: ignoredFiles.length })
+          yield* git([...cfg, ...args(["rm", "--cached", "-f", "--", ...ignoredFiles])], {
+            cwd: state.directory,
+          })
+        }
+
+        if (!filtered.length) return
+
         const large = (yield* Effect.all(
-          all.map((item) =>
+          filtered.map((item) =>
             fs
               .stat(path.join(state.directory, item))
               .pipe(Effect.catch(() => Effect.void))
@@ -259,14 +290,39 @@ export namespace Snapshot {
           log.warn("failed to get diff", { hash, exitCode: result.code })
           return { hash, files: [] }
         }
+        const files = result.text
+          .trim()
+          .split("\n")
+          .map((x) => x.trim())
+          .filter(Boolean)
+
+        // Filter out files that are now gitignored
+        if (files.length > 0) {
+          const checkArgs = [
+            ...quote,
+            "--git-dir",
+            path.join(state.worktree, ".git"),
+            "--work-tree",
+            state.worktree,
+            "check-ignore",
+            "--no-index",
+            "--",
+            ...files,
+          ]
+          const check = yield* git(checkArgs, { cwd: state.directory })
+          if (check.code === 0) {
+            const ignored = new Set(check.text.trim().split("\n").filter(Boolean))
+            const filtered = files.filter((item) => !ignored.has(item))
+            return {
+              hash,
+              files: filtered.map((x) => path.join(state.worktree, x).replaceAll("\\", "/")),
+            }
+          }
+        }
+
         return {
           hash,
-          files: result.text
-            .trim()
-            .split("\n")
-            .map((x) => x.trim())
-            .filter(Boolean)
-            .map((x) => path.join(state.worktree, x).replaceAll("\\", "/")),
+          files: files.map((x) => path.join(state.worktree, x).replaceAll("\\", "/")),
         }
       }),
     )
@@ -616,6 +672,30 @@ export namespace Snapshot {
             } satisfies Row,
           ]
         })

+        // Filter out files that are now gitignored
+        if (rows.length > 0) {
+          const files = rows.map((r) => r.file)
+          const checkArgs = [
+            ...quote,
+            "--git-dir",
+            path.join(state.worktree, ".git"),
+            "--work-tree",
+            state.worktree,
+            "check-ignore",
+            "--no-index",
+            "--",
+            ...files,
+          ]
+          const check = yield* git(checkArgs, { cwd: state.directory })
+          if (check.code === 0) {
+            const ignored = new Set(check.text.trim().split("\n").filter(Boolean))
+            const filtered = rows.filter((r) => !ignored.has(r.file))
+            rows.length = 0
+            rows.push(...filtered)
+          }
+        }
+
         const step = 100
         const patch = (file: string, before: string, after: string) =>
           formatPatch(structuredPatch(file, file, before, after, "", "", { context: Number.MAX_SAFE_INTEGER }))
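// `git check-ignore --no-index` tests paths against ignore patterns even when
// a file is already tracked, which is what the three snapshot filters above
// rely on (exit code 1 just means "nothing is ignored"). A standalone sketch:
import { execFileSync } from "node:child_process"
import * as path from "node:path"

export function ignoredPaths(worktree: string, files: string[]): Set<string> {
  if (files.length === 0) return new Set()
  try {
    const out = execFileSync(
      "git",
      ["--git-dir", path.join(worktree, ".git"), "--work-tree", worktree, "check-ignore", "--no-index", "--", ...files],
      { encoding: "utf8" },
    )
    return new Set(out.trim().split("\n").filter(Boolean))
  } catch {
    // execFileSync throws on non-zero exit; for check-ignore that includes
    // the benign "no paths ignored" case, so treat it as an empty set
    return new Set()
  }
}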
@@ -1,11 +1,8 @@
 import z from "zod"
-import { Effect } from "effect"
-import * as Stream from "effect/Stream"
+import { Effect, Option } from "effect"
 import { Tool } from "./tool"
-import { Filesystem } from "../util/filesystem"
 import { Ripgrep } from "../file/ripgrep"
-import { ChildProcess } from "effect/unstable/process"
-import { ChildProcessSpawner } from "effect/unstable/process/ChildProcessSpawner"
+import { AppFileSystem } from "../filesystem"

 import DESCRIPTION from "./grep.txt"
 import { Instance } from "../project/instance"
@@ -17,7 +14,8 @@ const MAX_LINE_LENGTH = 2000
 export const GrepTool = Tool.define(
   "grep",
   Effect.gen(function* () {
-    const spawner = yield* ChildProcessSpawner
+    const fs = yield* AppFileSystem.Service
+    const rg = yield* Ripgrep.Service

     return {
       description: DESCRIPTION,
@@ -28,6 +26,11 @@ export const GrepTool = Tool.define(
       }),
       execute: (params: { pattern: string; path?: string; include?: string }, ctx: Tool.Context) =>
         Effect.gen(function* () {
+          const empty = {
+            title: params.pattern,
+            metadata: { matches: 0, truncated: false },
+            output: "No files found",
+          }
           if (!params.pattern) {
             throw new Error("pattern is required")
           }
@@ -43,92 +46,58 @@ export const GrepTool = Tool.define(
             },
           })

-          let searchPath = params.path ?? Instance.directory
-          searchPath = path.isAbsolute(searchPath) ? searchPath : path.resolve(Instance.directory, searchPath)
+          const searchPath = AppFileSystem.resolve(
+            path.isAbsolute(params.path ?? Instance.directory)
+              ? (params.path ?? Instance.directory)
+              : path.join(Instance.directory, params.path ?? "."),
+          )
           yield* assertExternalDirectoryEffect(ctx, searchPath, { kind: "directory" })

-          const rgPath = yield* Effect.promise(() => Ripgrep.filepath())
-          const args = ["-nH", "--hidden", "--no-messages", "--field-match-separator=|", "--regexp", params.pattern]
-          if (params.include) {
-            args.push("--glob", params.include)
-          }
-          args.push(searchPath)
-
-          const result = yield* Effect.scoped(
-            Effect.gen(function* () {
-              const handle = yield* spawner.spawn(
-                ChildProcess.make(rgPath, args, {
-                  stdin: "ignore",
-                }),
-              )
-
-              const [output, errorOutput] = yield* Effect.all(
-                [Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))],
-                { concurrency: 2 },
-              )
-
-              const exitCode = yield* handle.exitCode
-
-              return { output, errorOutput, exitCode }
-            }),
-          )
-
-          const { output, errorOutput, exitCode } = result
-
-          // Exit codes: 0 = matches found, 1 = no matches, 2 = errors (but may still have matches)
-          // With --no-messages, we suppress error output but still get exit code 2 for broken symlinks etc.
-          // Only fail if exit code is 2 AND no output was produced
-          if (exitCode === 1 || (exitCode === 2 && !output.trim())) {
-            return {
-              title: params.pattern,
-              metadata: { matches: 0, truncated: false },
-              output: "No files found",
-            }
-          }
-
-          if (exitCode !== 0 && exitCode !== 2) {
-            throw new Error(`ripgrep failed: ${errorOutput}`)
-          }
-
-          const hasErrors = exitCode === 2
-
-          // Handle both Unix (\n) and Windows (\r\n) line endings
-          const lines = output.trim().split(/\r?\n/)
-          const matches = []
-
-          for (const line of lines) {
-            if (!line) continue
-
-            const [filePath, lineNumStr, ...lineTextParts] = line.split("|")
-            if (!filePath || !lineNumStr || lineTextParts.length === 0) continue
-
-            const lineNum = parseInt(lineNumStr, 10)
-            const lineText = lineTextParts.join("|")
-
-            const stats = Filesystem.stat(filePath)
-            if (!stats) continue
-
-            matches.push({
-              path: filePath,
-              modTime: stats.mtime.getTime(),
-              lineNum,
-              lineText,
-            })
-          }
-
-          matches.sort((a, b) => b.modTime - a.modTime)
+          const result = yield* rg.search({
+            cwd: searchPath,
+            pattern: params.pattern,
+            glob: params.include ? [params.include] : undefined,
+          })
+
+          if (result.items.length === 0) return empty
+
+          const rows = result.items.map((item) => ({
+            path: AppFileSystem.resolve(
+              path.isAbsolute(item.path.text) ? item.path.text : path.join(searchPath, item.path.text),
+            ),
+            line: item.line_number,
+            text: item.lines.text,
+          }))
+          const times = new Map(
+            (yield* Effect.forEach(
+              [...new Set(rows.map((row) => row.path))],
+              Effect.fnUntraced(function* (file) {
+                const info = yield* fs.stat(file).pipe(Effect.catch(() => Effect.succeed(undefined)))
+                if (!info || info.type === "Directory") return undefined
+                return [
+                  file,
+                  info.mtime.pipe(
+                    Option.map((time) => time.getTime()),
+                    Option.getOrElse(() => 0),
+                  ) ?? 0,
+                ] as const
+              }),
+              { concurrency: 16 },
+            )).filter((entry): entry is readonly [string, number] => Boolean(entry)),
+          )
+          const matches = rows.flatMap((row) => {
+            const mtime = times.get(row.path)
+            if (mtime === undefined) return []
+            return [{ ...row, mtime }]
+          })
+
+          matches.sort((a, b) => b.mtime - a.mtime)

           const limit = 100
           const truncated = matches.length > limit
           const finalMatches = truncated ? matches.slice(0, limit) : matches

-          if (finalMatches.length === 0) {
-            return {
-              title: params.pattern,
-              metadata: { matches: 0, truncated: false },
-              output: "No files found",
-            }
-          }
+          if (finalMatches.length === 0) return empty

           const totalMatches = matches.length
           const outputLines = [`Found ${totalMatches} matches${truncated ? ` (showing first ${limit})` : ""}`]
@@ -143,10 +112,8 @@ export const GrepTool = Tool.define(
               outputLines.push(`${match.path}:`)
             }
             const truncatedLineText =
-              match.lineText.length > MAX_LINE_LENGTH
-                ? match.lineText.substring(0, MAX_LINE_LENGTH) + "..."
-                : match.lineText
-            outputLines.push(`  Line ${match.lineNum}: ${truncatedLineText}`)
+              match.text.length > MAX_LINE_LENGTH ? match.text.substring(0, MAX_LINE_LENGTH) + "..." : match.text
+            outputLines.push(`  Line ${match.line}: ${truncatedLineText}`)
           }

           if (truncated) {
@@ -156,7 +123,7 @@ export const GrepTool = Tool.define(
             )
           }

-          if (hasErrors) {
+          if (result.partial) {
             outputLines.push("")
             outputLines.push("(Some paths were inaccessible and skipped)")
           }
@@ -10,59 +10,106 @@ export namespace Message {
     })),
   )

-  export class File extends Schema.Class<File>("Message.File")({
-    url: Schema.String,
+  export class Source extends Schema.Class<Source>("Message.Source")({
+    start: Schema.Number,
+    end: Schema.Number,
+    text: Schema.String,
+  }) {}
+
+  export class FileAttachment extends Schema.Class<FileAttachment>("Message.File.Attachment")({
+    uri: Schema.String,
     mime: Schema.String,
+    name: Schema.String.pipe(Schema.optional),
+    description: Schema.String.pipe(Schema.optional),
+    source: Source.pipe(Schema.optional),
   }) {
     static create(url: string) {
-      return new File({
-        url,
+      return new FileAttachment({
+        uri: url,
         mime: "text/plain",
       })
     }
   }

-  export class UserContent extends Schema.Class<UserContent>("Message.User.Content")({
-    text: Schema.String,
-    synthetic: Schema.Boolean.pipe(Schema.optional),
-    agent: Schema.String.pipe(Schema.optional),
-    files: Schema.Array(File).pipe(Schema.optional),
+  export class AgentAttachment extends Schema.Class<AgentAttachment>("Message.Agent.Attachment")({
+    name: Schema.String,
+    source: Source.pipe(Schema.optional),
   }) {}

   export class User extends Schema.Class<User>("Message.User")({
     id: ID,
     type: Schema.Literal("user"),
+    text: Schema.String,
+    files: Schema.Array(FileAttachment).pipe(Schema.optional),
+    agents: Schema.Array(AgentAttachment).pipe(Schema.optional),
     time: Schema.Struct({
       created: Schema.DateTimeUtc,
     }),
-    content: UserContent,
   }) {
-    static create(content: Schema.Schema.Type<typeof UserContent>) {
+    static create(input: { text: User["text"]; files?: User["files"]; agents?: User["agents"] }) {
       const msg = new User({
         id: ID.create(),
         type: "user",
+        ...input,
         time: {
           created: Effect.runSync(DateTime.now),
         },
-        content,
       })
       return msg
     }
-
-    static file(url: string) {
-      return new File({
-        url,
-        mime: "text/plain",
-      })
-    }
   }

-  export namespace User {}
+  export class Synthetic extends Schema.Class<Synthetic>("Message.Synthetic")({
+    id: ID,
+    type: Schema.Literal("synthetic"),
+    text: Schema.String,
+    time: Schema.Struct({
+      created: Schema.DateTimeUtc,
+    }),
+  }) {}
+
+  export class Request extends Schema.Class<Request>("Message.Request")({
+    id: ID,
+    type: Schema.Literal("start"),
+    model: Schema.Struct({
+      id: Schema.String,
+      providerID: Schema.String,
+      variant: Schema.String.pipe(Schema.optional),
+    }),
+    time: Schema.Struct({
+      created: Schema.DateTimeUtc,
+    }),
+  }) {}

   export class Text extends Schema.Class<Text>("Message.Text")({
     id: ID,
     type: Schema.Literal("text"),
     text: Schema.String,
     time: Schema.Struct({
       created: Schema.DateTimeUtc,
       completed: Schema.DateTimeUtc.pipe(Schema.optional),
     }),
   }) {}

+  export class Complete extends Schema.Class<Complete>("Message.Complete")({
+    id: ID,
+    type: Schema.Literal("complete"),
+    time: Schema.Struct({
+      created: Schema.DateTimeUtc,
+    }),
+    cost: Schema.Number,
+    tokens: Schema.Struct({
+      total: Schema.Number,
+      input: Schema.Number,
+      output: Schema.Number,
+      reasoning: Schema.Number,
+      cache: Schema.Struct({
+        read: Schema.Number,
+        write: Schema.Number,
+      }),
+    }),
+  }) {}
+
   export const Info = Schema.Union([User, Text])
   export type Info = Schema.Schema.Type<typeof Info>
 }

 const msg = Message.User.create({
   text: "Hello world",
   files: [Message.File.create("file://example.com/file.txt")],
 })

 console.log(JSON.stringify(msg, null, 2))
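// Schema.Class, as used throughout the message model above, couples a runtime
// decoder with a nominal class. A minimal sketch of the attachment shape
// (field names mirror the diff; the wiring itself is illustrative):
import { Schema } from "effect"

class Source extends Schema.Class<Source>("Source")({
  start: Schema.Number,
  end: Schema.Number,
  text: Schema.String,
}) {}

class FileAttachment extends Schema.Class<FileAttachment>("FileAttachment")({
  uri: Schema.String,
  mime: Schema.String,
  source: Source.pipe(Schema.optional),
}) {}

// Constructed values validate their fields; unknown input can be decoded:
const attachment = new FileAttachment({ uri: "file:///tmp/a.txt", mime: "text/plain" })
const decoded = Schema.decodeUnknownSync(FileAttachment)({ uri: "x", mime: "text/plain" })
console.log(attachment.uri, decoded instanceof FileAttachment)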
71
packages/opencode/src/v2/session.ts
Normal file
@@ -0,0 +1,71 @@
+import { Context, Layer, Schema, Effect } from "effect"
+import { Message } from "./message"
+import { Struct } from "effect"
+import { Identifier } from "@/id/id"
+import { withStatics } from "@/util/schema"
+import { Session } from "@/session"
+import { SessionID } from "@/session/schema"
+
+export namespace SessionV2 {
+  export const ID = SessionID
+
+  export type ID = Schema.Schema.Type<typeof ID>
+
+  export class PromptInput extends Schema.Class<PromptInput>("Session.PromptInput")({
+    ...Struct.omit(Message.User.fields, ["time", "type"]),
+    id: Schema.optionalKey(Message.ID),
+    sessionID: SessionV2.ID,
+  }) {}
+
+  export class CreateInput extends Schema.Class<CreateInput>("Session.CreateInput")({
+    id: Schema.optionalKey(SessionV2.ID),
+  }) {}
+
+  export class Info extends Schema.Class<Info>("Session.Info")({
+    id: SessionV2.ID,
+    model: Schema.Struct({
+      id: Schema.String,
+      providerID: Schema.String,
+      modelID: Schema.String,
+    }).pipe(Schema.optional),
+  }) {}
+
+  export interface Interface {
+    fromID: (id: SessionV2.ID) => Effect.Effect<Info>
+    create: (input: CreateInput) => Effect.Effect<Info>
+    prompt: (input: PromptInput) => Effect.Effect<Message.User>
+  }
+
+  export class Service extends Context.Service<Service, Interface>()("Session.Service") {}
+
+  export const layer = Layer.effect(Service)(
+    Effect.gen(function* () {
+      const session = yield* Session.Service
+
+      const create: Interface["create"] = Effect.fn("Session.create")(function* (input) {
+        throw new Error("Not implemented")
+      })
+
+      const prompt: Interface["prompt"] = Effect.fn("Session.prompt")(function* (input) {
+        throw new Error("Not implemented")
+      })
+
+      const fromID: Interface["fromID"] = Effect.fn("Session.fromID")(function* (id) {
+        const match = yield* session.get(id)
+        return fromV1(match)
+      })
+
+      return Service.of({
+        create,
+        prompt,
+        fromID,
+      })
+    }),
+  )
+
+  function fromV1(input: Session.Info): Info {
+    return new Info({
+      id: SessionV2.ID.make(input.id),
+    })
+  }
+}
@@ -20,6 +20,7 @@ import { AppFileSystem } from "@/filesystem"
 import { makeRuntime } from "@/effect/run-service"
 import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner"
 import { InstanceState } from "@/effect/instance-state"
+import { AppRuntime } from "@/effect/app-runtime"

 export namespace Worktree {
   const log = Log.create({ service: "worktree" })
@@ -266,7 +267,7 @@ export namespace Worktree {
       const booted = yield* Effect.promise(() =>
         Instance.provide({
           directory: info.directory,
-          init: InstanceBootstrap,
+          init: () => AppRuntime.runPromise(InstanceBootstrap),
           fn: () => undefined,
         })
           .then(() => true)
@@ -6,7 +6,6 @@ import { tmpdir } from "../../fixture/fixture"
 import { createTuiPluginApi } from "../../fixture/tui-plugin"
 import { Global } from "../../../src/global"
 import { TuiConfig } from "../../../src/config/tui"
-import { Config } from "../../../src/config/config"
 import { Filesystem } from "../../../src/util/filesystem"

 const { allThemes, addTheme } = await import("../../../src/cli/cmd/tui/context/theme")
@@ -325,7 +324,6 @@ export default {
   })
   const cwd = spyOn(process, "cwd").mockImplementation(() => tmp.path)
   const wait = spyOn(TuiConfig, "waitForDependencies").mockResolvedValue()
-  const install = spyOn(Config, "installDependencies").mockResolvedValue()

   try {
     expect(addTheme(tmp.extra.preloadedThemeName, { theme: { primary: "#303030" } })).toBe(true)
@@ -407,7 +405,6 @@ export default {
     await TuiPluginRuntime.dispose()
     cwd.mockRestore()
     wait.mockRestore()
-    install.mockRestore()
     if (backup === undefined) {
       await fs.rm(globalConfigPath, { force: true })
     } else {
@@ -701,7 +698,6 @@ test("updates installed theme when plugin metadata changes", async () => {
   process.env.OPENCODE_PLUGIN_META_FILE = path.join(tmp.path, "plugin-meta.json")
   const cwd = spyOn(process, "cwd").mockImplementation(() => tmp.path)
   const wait = spyOn(TuiConfig, "waitForDependencies").mockResolvedValue()
-  const install = spyOn(Config, "installDependencies").mockResolvedValue()

   const api = () =>
     createTuiPluginApi({
@@ -746,7 +742,6 @@ test("updates installed theme when plugin metadata changes", async () => {
     await TuiPluginRuntime.dispose()
     cwd.mockRestore()
     wait.mockRestore()
-    install.mockRestore()
     delete process.env.OPENCODE_PLUGIN_META_FILE
   }
 })
@@ -1,5 +1,5 @@
 import { test, expect, describe, mock, afterEach, beforeEach, spyOn } from "bun:test"
-import { Effect, Layer, Option } from "effect"
+import { Deferred, Effect, Fiber, Layer, Option } from "effect"
 import { NodeFileSystem, NodePath } from "@effect/platform-node"
 import { Config } from "../../src/config/config"
 import { Instance } from "../../src/project/instance"
@@ -7,8 +7,9 @@ import { Auth } from "../../src/auth"
 import { AccessToken, Account, AccountID, OrgID } from "../../src/account"
 import { AppFileSystem } from "../../src/filesystem"
 import { provideTmpdirInstance } from "../fixture/fixture"
-import { tmpdir } from "../fixture/fixture"
+import { tmpdir, tmpdirScoped } from "../fixture/fixture"
 import * as CrossSpawnSpawner from "../../src/effect/cross-spawn-spawner"
+import { testEffect } from "../lib/effect"

 /** Infra layer that provides FileSystem, Path, ChildProcessSpawner for test fixtures */
 const infra = CrossSpawnSpawner.defaultLayer.pipe(
@@ -32,6 +33,18 @@ const emptyAuth = Layer.mock(Auth.Service)({
   all: () => Effect.succeed({}),
 })

+const it = testEffect(
+  Config.layer.pipe(
+    Layer.provide(AppFileSystem.defaultLayer),
+    Layer.provide(emptyAuth),
+    Layer.provide(emptyAccount),
+    Layer.provideMerge(infra),
+  ),
+)
+
+const installDeps = (dir: string, input?: Config.InstallInput) =>
+  Config.Service.use((svc) => svc.installDependencies(dir, input))
+
 // Get managed config directory from environment (set in preload.ts)
 const managedConfigDir = process.env.OPENCODE_TEST_MANAGED_CONFIG_DIR!
@@ -817,128 +830,134 @@ test("installs dependencies in writable OPENCODE_CONFIG_DIR", async () => {
|
||||
}
|
||||
})
|
||||
|
||||
test("dedupes concurrent config dependency installs for the same dir", async () => {
|
||||
await using tmp = await tmpdir()
|
||||
const dir = path.join(tmp.path, "a")
|
||||
await fs.mkdir(dir, { recursive: true })
|
||||
it.live("dedupes concurrent config dependency installs for the same dir", () =>
|
||||
Effect.gen(function* () {
|
||||
const tmp = yield* tmpdirScoped()
|
||||
const dir = path.join(tmp, "a")
|
||||
yield* Effect.promise(() => fs.mkdir(dir, { recursive: true }))
|
||||
|
||||
const ticks: number[] = []
|
||||
let calls = 0
|
||||
let start = () => {}
|
||||
let done = () => {}
|
||||
let blocked = () => {}
|
||||
const ready = new Promise<void>((resolve) => {
|
||||
start = resolve
|
||||
})
|
||||
const gate = new Promise<void>((resolve) => {
|
||||
done = resolve
|
||||
})
|
||||
const waiting = new Promise<void>((resolve) => {
|
||||
blocked = resolve
|
||||
})
|
||||
const online = spyOn(Network, "online").mockReturnValue(false)
|
||||
const targetDir = dir
|
||||
const run = spyOn(Npm, "install").mockImplementation(async (d: string) => {
|
||||
const hit = path.normalize(d) === path.normalize(targetDir)
|
||||
if (hit) {
|
||||
let calls = 0
|
||||
const online = spyOn(Network, "online").mockReturnValue(false)
|
||||
const ready = Deferred.makeUnsafe<void>()
|
||||
const blocked = Deferred.makeUnsafe<void>()
|
||||
const hold = Deferred.makeUnsafe<void>()
|
||||
const target = path.normalize(dir)
|
||||
const run = spyOn(Npm, "install").mockImplementation(async (d: string) => {
|
||||
if (path.normalize(d) !== target) return
|
||||
calls += 1
|
||||
start()
|
||||
await gate
|
||||
}
|
||||
const mod = path.join(d, "node_modules", "@opencode-ai", "plugin")
|
||||
await fs.mkdir(mod, { recursive: true })
|
||||
await Filesystem.write(
|
||||
path.join(mod, "package.json"),
|
||||
JSON.stringify({ name: "@opencode-ai/plugin", version: "1.0.0" }),
|
||||
)
|
||||
if (hit) {
|
||||
start()
|
||||
await gate
|
||||
}
|
||||
})
|
||||
|
||||
try {
|
||||
const first = Config.installDependencies(dir)
|
||||
await ready
|
||||
const second = Config.installDependencies(dir, {
|
||||
waitTick: (tick) => {
|
||||
ticks.push(tick.attempt)
|
||||
blocked()
|
||||
blocked = () => {}
|
||||
},
|
||||
Deferred.doneUnsafe(ready, Effect.void)
|
||||
await Effect.runPromise(Deferred.await(hold))
|
||||
const mod = path.join(d, "node_modules", "@opencode-ai", "plugin")
|
||||
await fs.mkdir(mod, { recursive: true })
|
||||
await Filesystem.write(
|
||||
path.join(mod, "package.json"),
|
||||
JSON.stringify({ name: "@opencode-ai/plugin", version: "1.0.0" }),
|
||||
)
|
||||
})
|
||||
await waiting
|
||||
done()
|
||||
await Promise.all([first, second])
|
||||
} finally {
|
||||
online.mockRestore()
|
||||
run.mockRestore()
|
||||
}
|
||||
|
||||
expect(calls).toBe(2)
|
||||
expect(ticks.length).toBeGreaterThan(0)
|
||||
expect(await Filesystem.exists(path.join(dir, "package.json"))).toBe(true)
|
||||
})
|
||||
|
||||
test("serializes config dependency installs across dirs", async () => {
|
||||
if (process.platform !== "win32") return
|
||||
|
||||
await using tmp = await tmpdir()
|
||||
const a = path.join(tmp.path, "a")
|
||||
const b = path.join(tmp.path, "b")
|
||||
await fs.mkdir(a, { recursive: true })
|
||||
await fs.mkdir(b, { recursive: true })
|
||||
|
||||
let calls = 0
|
||||
let open = 0
|
||||
let peak = 0
|
||||
let start = () => {}
|
||||
let done = () => {}
|
||||
const ready = new Promise<void>((resolve) => {
|
||||
start = resolve
|
||||
})
|
||||
const gate = new Promise<void>((resolve) => {
|
||||
done = resolve
|
||||
})
|
||||
|
||||
const online = spyOn(Network, "online").mockReturnValue(false)
|
||||
const run = spyOn(Npm, "install").mockImplementation(async (dir: string) => {
|
||||
const cwd = path.normalize(dir)
|
||||
const hit = cwd === path.normalize(a) || cwd === path.normalize(b)
|
||||
if (hit) {
|
||||
calls += 1
|
||||
open += 1
|
||||
peak = Math.max(peak, open)
|
||||
if (calls === 1) {
|
||||
start()
|
||||
await gate
|
||||
}
|
||||
}
|
||||
const mod = path.join(cwd, "node_modules", "@opencode-ai", "plugin")
|
||||
await fs.mkdir(mod, { recursive: true })
|
||||
await Filesystem.write(
|
||||
path.join(mod, "package.json"),
|
||||
JSON.stringify({ name: "@opencode-ai/plugin", version: "1.0.0" }),
|
||||
yield* Effect.addFinalizer(() =>
|
||||
Effect.sync(() => {
|
||||
online.mockRestore()
|
||||
run.mockRestore()
|
||||
}),
|
||||
)
|
||||
if (hit) {
|
||||
open -= 1
|
||||
}
|
||||
})
|
||||
|
||||
try {
|
||||
const first = Config.installDependencies(a)
|
||||
await ready
|
||||
const second = Config.installDependencies(b)
|
||||
done()
|
||||
await Promise.all([first, second])
|
||||
} finally {
|
||||
online.mockRestore()
|
||||
run.mockRestore()
|
||||
}
|
||||
const first = yield* installDeps(dir).pipe(Effect.forkScoped)
|
||||
yield* Deferred.await(ready)
|
||||
|
||||
expect(calls).toBe(2)
|
||||
expect(peak).toBe(1)
|
||||
})
|
||||
let done = false
|
||||
const second = yield* installDeps(dir, {
|
||||
waitTick: () => {
|
||||
Deferred.doneUnsafe(blocked, Effect.void)
|
||||
},
|
||||
}).pipe(
|
||||
Effect.tap(() =>
|
||||
Effect.sync(() => {
|
||||
done = true
|
||||
}),
|
||||
),
|
||||
Effect.forkScoped,
|
||||
)
|
||||
|
||||
yield* Deferred.await(blocked)
|
||||
expect(done).toBe(false)
|
||||
|
||||
yield* Deferred.succeed(hold, void 0)
|
||||
yield* Fiber.join(first)
|
||||
yield* Fiber.join(second)
|
||||
|
||||
expect(calls).toBe(1)
|
||||
expect(yield* Effect.promise(() => Filesystem.exists(path.join(dir, "package.json")))).toBe(true)
|
||||
}),
|
||||
)
|
||||
|
||||
it.live("serializes config dependency installs across dirs", () =>
|
||||
Effect.gen(function* () {
|
||||
if (process.platform !== "win32") return
|
||||
|
||||
const tmp = yield* tmpdirScoped()
|
||||
const a = path.join(tmp, "a")
|
||||
const b = path.join(tmp, "b")
|
||||
yield* Effect.promise(() => fs.mkdir(a, { recursive: true }))
|
||||
yield* Effect.promise(() => fs.mkdir(b, { recursive: true }))
|
||||
|
||||
let calls = 0
|
||||
let open = 0
|
||||
let peak = 0
|
||||
const ready = Deferred.makeUnsafe<void>()
|
||||
const blocked = Deferred.makeUnsafe<void>()
|
||||
const hold = Deferred.makeUnsafe<void>()
|
||||
|
||||
const online = spyOn(Network, "online").mockReturnValue(false)
|
||||
const run = spyOn(Npm, "install").mockImplementation(async (dir: string) => {
|
||||
const cwd = path.normalize(dir)
|
||||
const hit = cwd === path.normalize(a) || cwd === path.normalize(b)
|
||||
if (hit) {
|
||||
calls += 1
|
||||
open += 1
|
||||
peak = Math.max(peak, open)
|
||||
if (calls === 1) {
|
||||
Deferred.doneUnsafe(ready, Effect.void)
|
||||
await Effect.runPromise(Deferred.await(hold))
|
||||
}
|
||||
}
|
||||
const mod = path.join(cwd, "node_modules", "@opencode-ai", "plugin")
|
||||
await fs.mkdir(mod, { recursive: true })
|
||||
await Filesystem.write(
|
||||
path.join(mod, "package.json"),
|
||||
JSON.stringify({ name: "@opencode-ai/plugin", version: "1.0.0" }),
|
||||
)
|
||||
if (hit) {
|
||||
open -= 1
|
||||
}
|
||||
})
|
||||
|
||||
yield* Effect.addFinalizer(() =>
|
||||
Effect.sync(() => {
|
||||
online.mockRestore()
|
||||
run.mockRestore()
|
||||
}),
|
||||
)
|
||||
|
||||
const first = yield* installDeps(a).pipe(Effect.forkScoped)
|
||||
yield* Deferred.await(ready)
|
||||
|
||||
const second = yield* installDeps(b, {
|
||||
waitTick: () => {
|
||||
Deferred.doneUnsafe(blocked, Effect.void)
|
||||
},
|
||||
}).pipe(Effect.forkScoped)
|
||||
yield* Deferred.await(blocked)
|
||||
expect(peak).toBe(1)
|
||||
|
||||
yield* Deferred.succeed(hold, void 0)
|
||||
yield* Fiber.join(first)
|
||||
yield* Fiber.join(second)
|
||||
|
||||
expect(calls).toBe(2)
|
||||
expect(peak).toBe(1)
|
||||
}),
|
||||
)
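
The tests above coordinate the test fiber and the mocked installer with a Deferred handshake. A minimal standalone sketch of that pattern, reusing only the Deferred calls visible in this diff and assuming the same Effect version the tests target; the `worker` fiber and its return value are illustrative:

```ts
import { Deferred, Effect, Fiber } from "effect"

// One fiber parks on `hold`; the main fiber observes `ready` first,
// proving the worker reached its critical section before being released.
const handshake = Effect.gen(function* () {
  const ready = Deferred.makeUnsafe<void>()
  const hold = Deferred.makeUnsafe<void>()

  const worker = yield* Effect.gen(function* () {
    Deferred.doneUnsafe(ready, Effect.void) // signal: inside the critical section
    yield* Deferred.await(hold) // park until released
    return "released"
  }).pipe(Effect.forkScoped)

  yield* Deferred.await(ready) // worker is provably parked now
  yield* Deferred.succeed(hold, void 0) // release it
  return yield* Fiber.join(worker)
}).pipe(Effect.scoped)
```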

test("resolves scoped npm plugins in config", async () => {
  await using tmp = await tmpdir({

@@ -38,7 +38,9 @@ describe("file.ripgrep", () => {
    expect(hasVisible).toBe(true)
    expect(hasHidden).toBe(false)
  })
})

describe("Ripgrep.Service", () => {
  test("search returns empty when nothing matches", async () => {
    await using tmp = await tmpdir({
      init: async (dir) => {
@@ -46,16 +48,34 @@ describe("file.ripgrep", () => {
      },
    })

    const hits = await Ripgrep.search({
      cwd: tmp.path,
      pattern: "needle",
    const result = await Effect.gen(function* () {
      const rg = yield* Ripgrep.Service
      return yield* rg.search({ cwd: tmp.path, pattern: "needle" })
    }).pipe(Effect.provide(Ripgrep.defaultLayer), Effect.runPromise)

    expect(result.partial).toBe(false)
    expect(result.items).toEqual([])
  })

  test("search returns matched rows", async () => {
    await using tmp = await tmpdir({
      init: async (dir) => {
        await Bun.write(path.join(dir, "match.ts"), "const value = 'needle'\n")
        await Bun.write(path.join(dir, "skip.txt"), "const value = 'other'\n")
      },
    })

    expect(hits).toEqual([])
  })
})
    const result = await Effect.gen(function* () {
      const rg = yield* Ripgrep.Service
      return yield* rg.search({ cwd: tmp.path, pattern: "needle", glob: ["*.ts"] })
    }).pipe(Effect.provide(Ripgrep.defaultLayer), Effect.runPromise)

    expect(result.partial).toBe(false)
    expect(result.items).toHaveLength(1)
    expect(result.items[0]?.path.text).toContain("match.ts")
    expect(result.items[0]?.lines.text).toContain("needle")
  })

describe("Ripgrep.Service", () => {
  test("files returns stream of filenames", async () => {
    await using tmp = await tmpdir({
      init: async (dir) => {

@@ -1842,6 +1842,11 @@ describe("ProviderTransform.message - cache control on gateway", () => {
          type: "ephemeral",
        },
      },
      alibaba: {
        cacheControl: {
          type: "ephemeral",
        },
      },
    })
  })

@@ -1894,6 +1899,11 @@ describe("ProviderTransform.message - cache control on gateway", () => {
          type: "ephemeral",
        },
      },
      alibaba: {
        cacheControl: {
          type: "ephemeral",
        },
      },
    })
  })
})

@@ -43,7 +43,6 @@ describe("project.initGit endpoint", () => {
      worktree: tmp.path,
    })
    expect(reloadSpy).toHaveBeenCalledTimes(1)
    expect(reloadSpy.mock.calls[0]?.[0]?.init).toBe(InstanceBootstrap)
    expect(seen.some((evt) => evt.directory === tmp.path && evt.payload.type === "server.instance.disposed")).toBe(
      true,
    )

@@ -1005,6 +1005,15 @@ describe("session.getUsage", () => {
        inputTokens: 1000,
        outputTokens: 500,
        totalTokens: 1500,
        inputTokenDetails: {
          noCacheTokens: undefined,
          cacheReadTokens: undefined,
          cacheWriteTokens: undefined,
        },
        outputTokenDetails: {
          textTokens: undefined,
          reasoningTokens: undefined,
        },
      },
    })

@@ -1023,7 +1032,15 @@ describe("session.getUsage", () => {
        inputTokens: 1000,
        outputTokens: 500,
        totalTokens: 1500,
        cachedInputTokens: 200,
        inputTokenDetails: {
          noCacheTokens: 800,
          cacheReadTokens: 200,
          cacheWriteTokens: undefined,
        },
        outputTokenDetails: {
          textTokens: undefined,
          reasoningTokens: undefined,
        },
      },
    })

@@ -1039,6 +1056,15 @@ describe("session.getUsage", () => {
        inputTokens: 1000,
        outputTokens: 500,
        totalTokens: 1500,
        inputTokenDetails: {
          noCacheTokens: undefined,
          cacheReadTokens: undefined,
          cacheWriteTokens: undefined,
        },
        outputTokenDetails: {
          textTokens: undefined,
          reasoningTokens: undefined,
        },
      },
      metadata: {
        anthropic: {
@@ -1059,7 +1085,15 @@ describe("session.getUsage", () => {
        inputTokens: 1000,
        outputTokens: 500,
        totalTokens: 1500,
        cachedInputTokens: 200,
        inputTokenDetails: {
          noCacheTokens: 800,
          cacheReadTokens: 200,
          cacheWriteTokens: undefined,
        },
        outputTokenDetails: {
          textTokens: undefined,
          reasoningTokens: undefined,
        },
      },
      metadata: {
        anthropic: {},
@@ -1078,7 +1112,15 @@ describe("session.getUsage", () => {
        inputTokens: 1000,
        outputTokens: 500,
        totalTokens: 1500,
        reasoningTokens: 100,
        inputTokenDetails: {
          noCacheTokens: undefined,
          cacheReadTokens: undefined,
          cacheWriteTokens: undefined,
        },
        outputTokenDetails: {
          textTokens: 400,
          reasoningTokens: 100,
        },
      },
    })

@@ -1104,7 +1146,15 @@ describe("session.getUsage", () => {
        inputTokens: 0,
        outputTokens: 1_000_000,
        totalTokens: 1_000_000,
        reasoningTokens: 250_000,
        inputTokenDetails: {
          noCacheTokens: undefined,
          cacheReadTokens: undefined,
          cacheWriteTokens: undefined,
        },
        outputTokenDetails: {
          textTokens: 750_000,
          reasoningTokens: 250_000,
        },
      },
    })

@@ -1121,6 +1171,15 @@ describe("session.getUsage", () => {
        inputTokens: 0,
        outputTokens: 0,
        totalTokens: 0,
        inputTokenDetails: {
          noCacheTokens: undefined,
          cacheReadTokens: undefined,
          cacheWriteTokens: undefined,
        },
        outputTokenDetails: {
          textTokens: undefined,
          reasoningTokens: undefined,
        },
      },
    })

@@ -1148,6 +1207,15 @@ describe("session.getUsage", () => {
        inputTokens: 1_000_000,
        outputTokens: 100_000,
        totalTokens: 1_100_000,
        inputTokenDetails: {
          noCacheTokens: undefined,
          cacheReadTokens: undefined,
          cacheWriteTokens: undefined,
        },
        outputTokenDetails: {
          textTokens: undefined,
          reasoningTokens: undefined,
        },
      },
    })

@@ -1163,7 +1231,15 @@ describe("session.getUsage", () => {
        inputTokens: 1000,
        outputTokens: 500,
        totalTokens: 1500,
        cachedInputTokens: 200,
        inputTokenDetails: {
          noCacheTokens: 800,
          cacheReadTokens: 200,
          cacheWriteTokens: undefined,
        },
        outputTokenDetails: {
          textTokens: undefined,
          reasoningTokens: undefined,
        },
      }
      if (npm === "@ai-sdk/amazon-bedrock") {
        const result = Session.getUsage({
@@ -1214,7 +1290,15 @@ describe("session.getUsage", () => {
        inputTokens: 1000,
        outputTokens: 500,
        totalTokens: 1500,
        cachedInputTokens: 200,
        inputTokenDetails: {
          noCacheTokens: 800,
          cacheReadTokens: 200,
          cacheWriteTokens: undefined,
        },
        outputTokenDetails: {
          textTokens: undefined,
          reasoningTokens: undefined,
        },
      },
      metadata: {
        vertex: {
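
The token-detail fields asserted throughout these hunks follow a simple arithmetic: each detail bucket sums to its corresponding total. A small illustration using the fixture values above (the `TokenUsage` type alias is ours, not the SDK's):

```ts
// Field names mirror the assertions above; the type alias is illustrative.
type TokenUsage = {
  inputTokens: number
  outputTokens: number
  totalTokens: number
  inputTokenDetails: { noCacheTokens?: number; cacheReadTokens?: number; cacheWriteTokens?: number }
  outputTokenDetails: { textTokens?: number; reasoningTokens?: number }
}

const usage: TokenUsage = {
  inputTokens: 1000, // = noCacheTokens (800) + cacheReadTokens (200)
  outputTokens: 500,
  totalTokens: 1500, // = inputTokens + outputTokens
  inputTokenDetails: { noCacheTokens: 800, cacheReadTokens: 200, cacheWriteTokens: undefined },
  outputTokenDetails: { textTokens: undefined, reasoningTokens: undefined },
}

// Outputs decompose the same way: 1_000_000 = textTokens (750_000) + reasoningTokens (250_000).
```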
@@ -511,6 +511,49 @@ test("circular symlinks", async () => {
  })
})

test("source project gitignore is respected - ignored files are not snapshotted", async () => {
  await using tmp = await tmpdir({
    git: true,
    init: async (dir) => {
      // Create gitignore BEFORE any tracking
      await Filesystem.write(`${dir}/.gitignore`, "*.ignored\nbuild/\nnode_modules/\n")
      await Filesystem.write(`${dir}/tracked.txt`, "tracked content")
      await Filesystem.write(`${dir}/ignored.ignored`, "ignored content")
      await $`mkdir -p ${dir}/build`.quiet()
      await Filesystem.write(`${dir}/build/output.js`, "build output")
      await Filesystem.write(`${dir}/normal.js`, "normal js")
      await $`git add .`.cwd(dir).quiet()
      await $`git commit -m init`.cwd(dir).quiet()
    },
  })

  await Instance.provide({
    directory: tmp.path,
    fn: async () => {
      const before = await Snapshot.track()
      expect(before).toBeTruthy()

      // Modify tracked files and create new ones - some ignored, some not
      await Filesystem.write(`${tmp.path}/tracked.txt`, "modified tracked")
      await Filesystem.write(`${tmp.path}/new.ignored`, "new ignored")
      await Filesystem.write(`${tmp.path}/new-tracked.txt`, "new tracked")
      await Filesystem.write(`${tmp.path}/build/new-build.js`, "new build file")

      const patch = await Snapshot.patch(before!)

      // Modified and new tracked files should be in snapshot
      expect(patch.files).toContain(fwd(tmp.path, "new-tracked.txt"))
      expect(patch.files).toContain(fwd(tmp.path, "tracked.txt"))

      // Ignored files should NOT be in snapshot
      expect(patch.files).not.toContain(fwd(tmp.path, "new.ignored"))
      expect(patch.files).not.toContain(fwd(tmp.path, "ignored.ignored"))
      expect(patch.files).not.toContain(fwd(tmp.path, "build/output.js"))
      expect(patch.files).not.toContain(fwd(tmp.path, "build/new-build.js"))
    },
  })
})

test("gitignore changes", async () => {
  await using tmp = await bootstrap()
  await Instance.provide({
@@ -535,6 +578,75 @@ test("gitignore changes", async () => {
  })
})

test("files tracked in snapshot but now gitignored are filtered out", async () => {
  await using tmp = await bootstrap()
  await Instance.provide({
    directory: tmp.path,
    fn: async () => {
      // First, create a file and snapshot it
      await Filesystem.write(`${tmp.path}/later-ignored.txt`, "initial content")
      const before = await Snapshot.track()
      expect(before).toBeTruthy()

      // Modify the file (so it appears in diff-files)
      await Filesystem.write(`${tmp.path}/later-ignored.txt`, "modified content")

      // Now add gitignore that would exclude this file
      await Filesystem.write(`${tmp.path}/.gitignore`, "later-ignored.txt\n")

      // Also create another tracked file
      await Filesystem.write(`${tmp.path}/still-tracked.txt`, "new tracked file")

      const patch = await Snapshot.patch(before!)

      // The file that is now gitignored should NOT appear, even though it was
      // previously tracked and modified
      expect(patch.files).not.toContain(fwd(tmp.path, "later-ignored.txt"))

      // The gitignore file itself should appear
      expect(patch.files).toContain(fwd(tmp.path, ".gitignore"))

      // Other tracked files should appear
      expect(patch.files).toContain(fwd(tmp.path, "still-tracked.txt"))
    },
  })
})

test("gitignore updated between track calls filters from diff", async () => {
  await using tmp = await bootstrap()
  await Instance.provide({
    directory: tmp.path,
    fn: async () => {
      // a.txt is already committed from bootstrap - track it in snapshot
      const before = await Snapshot.track()
      expect(before).toBeTruthy()

      // Modify a.txt (so it appears in diff-files)
      await Filesystem.write(`${tmp.path}/a.txt`, "modified content")

      // Now add gitignore that would exclude a.txt
      await Filesystem.write(`${tmp.path}/.gitignore`, "a.txt\n")

      // Also modify b.txt which is not gitignored
      await Filesystem.write(`${tmp.path}/b.txt`, "also modified")

      // Second track - should not include a.txt even though it changed
      const after = await Snapshot.track()
      expect(after).toBeTruthy()

      // Verify a.txt is NOT in the diff between snapshots
      const diffs = await Snapshot.diffFull(before!, after!)
      expect(diffs.some((x) => x.file === "a.txt")).toBe(false)

      // But .gitignore should be in the diff
      expect(diffs.some((x) => x.file === ".gitignore")).toBe(true)

      // b.txt should be in the diff (not gitignored)
      expect(diffs.some((x) => x.file === "b.txt")).toBe(true)
    },
  })
})

test("git info exclude changes", async () => {
  await using tmp = await bootstrap()
  await Instance.provide({
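
The filtering these tests assert matches what git itself reports. A rough illustration of how one could verify it by hand with Bun's shell (not necessarily how Snapshot implements it; the directory path is made up):

```ts
import { $ } from "bun"

const dir = "/tmp/example-repo" // illustrative path to a git worktree

// `git check-ignore` exits 0 when the path is excluded by .gitignore,
// so a snapshotting layer can drop such paths from its patch.
const res = await $`git check-ignore later-ignored.txt`.cwd(dir).nothrow().quiet()
const isIgnored = res.exitCode === 0 // true once .gitignore lists the file
```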
@@ -65,6 +65,18 @@ const readFileTime = (sessionID: SessionID, filepath: string) =>
const subscribeBus = <D extends BusEvent.Definition>(def: D, callback: () => unknown) =>
  runtime.runPromise(Bus.Service.use((bus) => bus.subscribeCallback(def, callback)))

async function onceBus<D extends BusEvent.Definition>(def: D) {
  const result = Promise.withResolvers<void>()
  const unsub = await subscribeBus(def, () => {
    unsub()
    result.resolve()
  })
  return {
    wait: result.promise,
    unsub,
  }
}

describe("tool.edit", () => {
  describe("creating new files", () => {
    test("creates new file when oldString is empty", async () => {
@@ -128,23 +140,25 @@ describe("tool.edit", () => {
      fn: async () => {
        const { FileWatcher } = await import("../../src/file/watcher")

        const events: string[] = []
        const unsubUpdated = await subscribeBus(FileWatcher.Event.Updated, () => events.push("updated"))
        const updated = await onceBus(FileWatcher.Event.Updated)

        const edit = await resolve()
        await Effect.runPromise(
          edit.execute(
            {
              filePath: filepath,
              oldString: "",
              newString: "content",
            },
            ctx,
          ),
        )
        try {
          const edit = await resolve()
          await Effect.runPromise(
            edit.execute(
              {
                filePath: filepath,
                oldString: "",
                newString: "content",
              },
              ctx,
            ),
          )

        expect(events).toContain("updated")
        unsubUpdated()
          await updated.wait
        } finally {
          updated.unsub()
        }
      },
    })
  })
@@ -359,23 +373,25 @@ describe("tool.edit", () => {

        const { FileWatcher } = await import("../../src/file/watcher")

        const events: string[] = []
        const unsubUpdated = await subscribeBus(FileWatcher.Event.Updated, () => events.push("updated"))
        const updated = await onceBus(FileWatcher.Event.Updated)

        const edit = await resolve()
        await Effect.runPromise(
          edit.execute(
            {
              filePath: filepath,
              oldString: "original",
              newString: "modified",
            },
            ctx,
          ),
        )
        try {
          const edit = await resolve()
          await Effect.runPromise(
            edit.execute(
              {
                filePath: filepath,
                oldString: "original",
                newString: "modified",
              },
              ctx,
            ),
          )

        expect(events).toContain("updated")
        unsubUpdated()
          await updated.wait
        } finally {
          updated.unsub()
        }
      },
    })
  })

@@ -1,22 +1,26 @@
import { describe, expect, test } from "bun:test"
import { describe, expect } from "bun:test"
import path from "path"
import { Effect, Layer, ManagedRuntime } from "effect"
import { Effect, Layer } from "effect"
import { GrepTool } from "../../src/tool/grep"
import { Instance } from "../../src/project/instance"
import { tmpdir } from "../fixture/fixture"
import { provideInstance, provideTmpdirInstance } from "../fixture/fixture"
import { SessionID, MessageID } from "../../src/session/schema"
import * as CrossSpawnSpawner from "../../src/effect/cross-spawn-spawner"
import { Truncate } from "../../src/tool/truncate"
import { Agent } from "../../src/agent/agent"
import { Ripgrep } from "../../src/file/ripgrep"
import { AppFileSystem } from "../../src/filesystem"
import { testEffect } from "../lib/effect"

const runtime = ManagedRuntime.make(
  Layer.mergeAll(CrossSpawnSpawner.defaultLayer, Truncate.defaultLayer, Agent.defaultLayer),
const it = testEffect(
  Layer.mergeAll(
    CrossSpawnSpawner.defaultLayer,
    AppFileSystem.defaultLayer,
    Ripgrep.defaultLayer,
    Truncate.defaultLayer,
    Agent.defaultLayer,
  ),
)

function initGrep() {
  return runtime.runPromise(GrepTool.pipe(Effect.flatMap((info) => info.init())))
}

const ctx = {
  sessionID: SessionID.make("ses_test"),
  messageID: MessageID.make(""),
@@ -31,99 +35,59 @@ const ctx = {
const projectRoot = path.join(__dirname, "../..")

describe("tool.grep", () => {
  test("basic search", async () => {
    await Instance.provide({
      directory: projectRoot,
      fn: async () => {
        const grep = await initGrep()
        const result = await Effect.runPromise(
          grep.execute(
            {
              pattern: "export",
              path: path.join(projectRoot, "src/tool"),
              include: "*.ts",
            },
            ctx,
          ),
        )
        expect(result.metadata.matches).toBeGreaterThan(0)
        expect(result.output).toContain("Found")
      },
    })
  })
  it.live("basic search", () =>
    Effect.gen(function* () {
      const info = yield* GrepTool
      const grep = yield* info.init()
      const result = yield* provideInstance(projectRoot)(
        grep.execute(
          {
            pattern: "export",
            path: path.join(projectRoot, "src/tool"),
            include: "*.ts",
          },
          ctx,
        ),
      )
      expect(result.metadata.matches).toBeGreaterThan(0)
      expect(result.output).toContain("Found")
    }),
  )

  test("no matches returns correct output", async () => {
    await using tmp = await tmpdir({
      init: async (dir) => {
        await Bun.write(path.join(dir, "test.txt"), "hello world")
      },
    })
    await Instance.provide({
      directory: tmp.path,
      fn: async () => {
        const grep = await initGrep()
        const result = await Effect.runPromise(
          grep.execute(
            {
              pattern: "xyznonexistentpatternxyz123",
              path: tmp.path,
            },
            ctx,
          ),
  it.live("no matches returns correct output", () =>
    provideTmpdirInstance((dir) =>
      Effect.gen(function* () {
        yield* Effect.promise(() => Bun.write(path.join(dir, "test.txt"), "hello world"))
        const info = yield* GrepTool
        const grep = yield* info.init()
        const result = yield* grep.execute(
          {
            pattern: "xyznonexistentpatternxyz123",
            path: dir,
          },
          ctx,
        )
        expect(result.metadata.matches).toBe(0)
        expect(result.output).toBe("No files found")
      },
    })
  })
      }),
    ),
  )

  test("handles CRLF line endings in output", async () => {
    // This test verifies the regex split handles both \n and \r\n
    await using tmp = await tmpdir({
      init: async (dir) => {
        // Create a test file with content
        await Bun.write(path.join(dir, "test.txt"), "line1\nline2\nline3")
      },
    })
    await Instance.provide({
      directory: tmp.path,
      fn: async () => {
        const grep = await initGrep()
        const result = await Effect.runPromise(
          grep.execute(
            {
              pattern: "line",
              path: tmp.path,
            },
            ctx,
          ),
  it.live("finds matches in tmp instance", () =>
    provideTmpdirInstance((dir) =>
      Effect.gen(function* () {
        yield* Effect.promise(() => Bun.write(path.join(dir, "test.txt"), "line1\nline2\nline3"))
        const info = yield* GrepTool
        const grep = yield* info.init()
        const result = yield* grep.execute(
          {
            pattern: "line",
            path: dir,
          },
          ctx,
        )
        expect(result.metadata.matches).toBeGreaterThan(0)
      },
    })
  })
})

describe("CRLF regex handling", () => {
  test("regex correctly splits Unix line endings", () => {
    const unixOutput = "file1.txt|1|content1\nfile2.txt|2|content2\nfile3.txt|3|content3"
    const lines = unixOutput.trim().split(/\r?\n/)
    expect(lines.length).toBe(3)
    expect(lines[0]).toBe("file1.txt|1|content1")
    expect(lines[2]).toBe("file3.txt|3|content3")
  })

  test("regex correctly splits Windows CRLF line endings", () => {
    const windowsOutput = "file1.txt|1|content1\r\nfile2.txt|2|content2\r\nfile3.txt|3|content3"
    const lines = windowsOutput.trim().split(/\r?\n/)
    expect(lines.length).toBe(3)
    expect(lines[0]).toBe("file1.txt|1|content1")
    expect(lines[2]).toBe("file3.txt|3|content3")
  })

  test("regex handles mixed line endings", () => {
    const mixedOutput = "file1.txt|1|content1\nfile2.txt|2|content2\r\nfile3.txt|3|content3"
    const lines = mixedOutput.trim().split(/\r?\n/)
    expect(lines.length).toBe(3)
  })
      }),
    ),
  )
})

@@ -77,6 +77,12 @@ export function createOpencodeClient(config?: Config & { directory?: string; exp
      workspace: config?.experimental_workspaceID,
    }),
  )
  const result = new OpencodeClient({ client })
  return result
  client.interceptors.response.use((response) => {
    const contentType = response.headers.get("content-type")
    if (contentType === "text/html")
      throw new Error("Request is not supported by this version of OpenCode Server (Server responded with text/html)")

    return response
  })
  return new OpencodeClient({ client })
}
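
A sketch of what the new interceptor buys callers (the config shape and URL below are illustrative): an older server that answers a route with an HTML page now fails fast with a descriptive error instead of a downstream JSON parse failure.

```ts
// Illustrative usage; the baseUrl option and value are made up.
const client = createOpencodeClient({ baseUrl: "http://localhost:4096" })
// Any response with content-type text/html is rejected by the interceptor
// above before a caller ever tries to decode it as JSON.
```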
@@ -1,6 +1,5 @@
import { parsePatchFiles, type FileDiffMetadata } from "@pierre/diffs"
import { sampledChecksum } from "@opencode-ai/util/encode"
import { formatPatch, structuredPatch } from "diff"
import { parseDiffFromFile, type FileDiffMetadata } from "@pierre/diffs"
import { formatPatch, parsePatch, structuredPatch } from "diff"
import type { SnapshotFileDiff, VcsFileDiff } from "@opencode-ai/sdk/v2"

type LegacyDiff = {
@@ -41,26 +40,50 @@ function empty(file: string, key: string) {
}

function patch(diff: ReviewDiff) {
  if (typeof diff.patch === "string") return diff.patch
  return formatPatch(
    structuredPatch(
      diff.file,
      diff.file,
      "before" in diff && typeof diff.before === "string" ? diff.before : "",
      "after" in diff && typeof diff.after === "string" ? diff.after : "",
      "",
      "",
      { context: Number.MAX_SAFE_INTEGER },
  if (typeof diff.patch === "string") {
    const [patch] = parsePatch(diff.patch)

    const beforeLines = []
    const afterLines = []

    for (const hunk of patch.hunks) {
      for (const line of hunk.lines) {
        if (line.startsWith("-")) {
          beforeLines.push(line.slice(1))
        } else if (line.startsWith("+")) {
          afterLines.push(line.slice(1))
        } else {
          // context line (starts with ' ')
          beforeLines.push(line.slice(1))
          afterLines.push(line.slice(1))
        }
      }
    }

    return { before: beforeLines.join("\n"), after: afterLines.join("\n"), patch: diff.patch }
  }
  return {
    before: "before" in diff && typeof diff.before === "string" ? diff.before : "",
    after: "after" in diff && typeof diff.after === "string" ? diff.after : "",
    patch: formatPatch(
      structuredPatch(
        diff.file,
        diff.file,
        "before" in diff && typeof diff.before === "string" ? diff.before : "",
        "after" in diff && typeof diff.after === "string" ? diff.after : "",
        "",
        "",
        { context: Number.MAX_SAFE_INTEGER },
      ),
    )
  }
}

function file(file: string, patch: string) {
function file(file: string, patch: string, before: string, after: string) {
  const hit = cache.get(patch)
  if (hit) return hit

  const key = sampledChecksum(patch) ?? file
  const value = parsePatchFiles(patch, key).flatMap((item) => item.files)[0] ?? empty(file, key)
  const value = parseDiffFromFile({ name: file, contents: before }, { name: file, contents: after })
  cache.set(patch, value)
  return value
}
@@ -69,11 +92,11 @@ export function normalize(diff: ReviewDiff): ViewDiff {
  const next = patch(diff)
  return {
    file: diff.file,
    patch: next,
    patch: next.patch,
    additions: diff.additions,
    deletions: diff.deletions,
    status: diff.status,
    fileDiff: file(diff.file, next),
    fileDiff: file(diff.file, next.patch, next.before, next.after),
  }
}
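
A worked example of what the new string-patch branch computes, standalone and using only `parsePatch` from the `diff` package as above (the sample patch text is made up). Note the reconstruction only recovers full file contents when the patch carries full context, which is why the non-string branch builds patches with `context: Number.MAX_SAFE_INTEGER`.

```ts
import { parsePatch } from "diff"

// A tiny unified diff: one context line, one removal, one addition.
const text = ["--- a.txt", "+++ a.txt", "@@ -1,2 +1,2 @@", " unchanged", "-old line", "+new line", ""].join("\n")

const [parsed] = parsePatch(text)
const before: string[] = []
const after: string[] = []
for (const hunk of parsed.hunks) {
  for (const line of hunk.lines) {
    if (line.startsWith("-")) before.push(line.slice(1))
    else if (line.startsWith("+")) after.push(line.slice(1))
    else {
      // context line: present on both sides
      before.push(line.slice(1))
      after.push(line.slice(1))
    }
  }
}
// before.join("\n") === "unchanged\nold line"
// after.join("\n")  === "unchanged\nnew line"
```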