mirror of
https://fastgit.cc/https://github.com/anomalyco/opencode
synced 2026-04-21 13:21:17 +08:00
refactor: destroy Storage facades (#21956)
This commit is contained in:
@@ -308,3 +308,32 @@ Current raw fs users that will convert during tool migration:
|
||||
- [ ] `util/flock.ts` — file-based distributed lock with heartbeat → Effect.repeat + addFinalizer
|
||||
- [ ] `util/process.ts` — child process spawn wrapper → return Effect instead of Promise
|
||||
- [ ] `util/lazy.ts` — replace uses in Effect code with Effect.cached; keep for sync-only code
|
||||
|
||||
## Destroying the facades
|
||||
|
||||
Every service currently exports async facade functions at the bottom of its namespace — `export async function read(...) { return runPromise(...) }` — backed by a per-service `makeRuntime`. These exist because cyclic imports used to force each service to build its own independent runtime. Now that the layer DAG is acyclic and `AppRuntime` (`src/effect/app-runtime.ts`) composes everything into one `ManagedRuntime`, we're removing them.
|
||||
|
||||
### Process
|
||||
|
||||
For each service, the migration is roughly:
|
||||
|
||||
1. **Find callers.** `grep -En "Namespace\.(methodA|methodB|...)"` across `src/` and `test/` — the `-E` flag is required for the `(…|…)` alternation. Skip the service file itself.
|
||||
2. **Migrate production callers.** For each effectful caller that does `Effect.tryPromise(() => Namespace.method(...))`:
|
||||
- Add the service to the caller's layer R type (`Layer.Layer<Self, never, ... | Namespace.Service>`)
|
||||
- Yield it at the top of the layer: `const ns = yield* Namespace.Service`
|
||||
- Replace `Effect.tryPromise(() => Namespace.method(...))` with `yield* ns.method(...)` (or `ns.method(...).pipe(Effect.orElseSucceed(...))` for the common fallback case)
|
||||
- Add `Layer.provide(Namespace.defaultLayer)` to the caller's own `defaultLayer` chain
|
||||
3. **Fix tests that used the caller's raw `.layer`.** Any test that composes `Caller.layer` (not `defaultLayer`) needs to also provide the newly-required service tag. The fastest fix is usually switching to `Caller.defaultLayer` since it now pulls in the new dependency.
|
||||
4. **Migrate test callers of the facade.** Tests calling `Namespace.method(...)` directly get converted to full effectful style using `testEffect(Namespace.defaultLayer)` + `it.live` / `it.effect` + `yield* svc.method(...)`. Don't wrap the test body in `Effect.promise(async () => {...})` — do the whole thing in `Effect.gen` and use `AppFileSystem.Service` / `tmpdirScoped` / `Effect.addFinalizer` for what used to be raw `fs` / `Bun.write` / `try/finally`.
|
||||
5. **Delete the facades.** Once `grep` shows zero callers, remove the `export async function` block AND the `makeRuntime(...)` line from the service namespace. Also remove the now-unused `import { makeRuntime }`.
|
||||
|
||||
### Pitfalls
|
||||
|
||||
- **Layer caching inside tests.** `testEffect(layer)` constructs the Storage (or whatever) service once and memoizes it. If a test then tries `inner.pipe(Effect.provide(customStorage))` to swap in a differently-configured Storage, the outer cached one wins and the inner provision is a no-op. Fix: wrap the overriding layer in `Layer.fresh(...)`, which forces a new instance to be built instead of hitting the memoMap cache. This lets a single `testEffect(...)` serve both simple and per-test-customized cases.
|
||||
- **`Effect.tryPromise` → `yield*` drops the Promise wrapper.** The old code was `Effect.tryPromise(() => Storage.read(...))` — a `tryPromise` wrapper because the facade returned a Promise. The new code is `yield* storage.read(...)` directly — the service method already returns an Effect, so no wrapper is needed. Don't reach for `Effect.promise` or `Effect.tryPromise` during migration; if you're using them on a service method call, you're doing it wrong.
|
||||
- **Raw `.layer` test callers break silently in the type checker.** When you add a new R requirement to a service's `.layer`, any test that composes it raw (not `defaultLayer`) becomes under-specified. `tsgo` will flag this — the error looks like `Type 'Storage.Service' is not assignable to type '... | Service | TestConsole'`. Usually the fix is to switch that composition to `defaultLayer`, or add `Layer.provide(NewDep.defaultLayer)` to the custom composition.
|
||||
- **Tests that do async setup with `fs`, `Bun.write`, `tmpdir`.** Convert these to `AppFileSystem.Service` calls inside `Effect.gen`, and use `tmpdirScoped()` instead of `tmpdir()` so cleanup happens via the scope finalizer. For repeated JSON writes during test setup, a small helper like `const writeJson = Effect.fnUntraced(function* (file, value) { const fs = yield* AppFileSystem.Service; yield* fs.makeDirectory(path.dirname(file), { recursive: true }); yield* fs.writeFileString(file, JSON.stringify(value, null, 2)) })` keeps the migration tests clean.
|
||||
|
||||
### Migration log
|
||||
|
||||
- `Storage` — migrated 2026-04-10. One production caller (`Session.diff`) and all storage.test.ts tests converted to effectful style. Facades and `makeRuntime` removed.
|
||||
|
||||
@@ -361,10 +361,11 @@ export namespace Session {
|
||||
const db = <T>(fn: (d: Parameters<typeof Database.use>[0] extends (trx: infer D) => any ? D : never) => T) =>
|
||||
Effect.sync(() => Database.use(fn))
|
||||
|
||||
export const layer: Layer.Layer<Service, never, Bus.Service> = Layer.effect(
|
||||
export const layer: Layer.Layer<Service, never, Bus.Service | Storage.Service> = Layer.effect(
|
||||
Service,
|
||||
Effect.gen(function* () {
|
||||
const bus = yield* Bus.Service
|
||||
const storage = yield* Storage.Service
|
||||
|
||||
const createNext = Effect.fn("Session.createNext")(function* (input: {
|
||||
id?: SessionID
|
||||
@@ -585,7 +586,7 @@ export namespace Session {
|
||||
})
|
||||
|
||||
const diff = Effect.fn("Session.diff")(function* (sessionID: SessionID) {
|
||||
return yield* Effect.tryPromise(() => Storage.read<Snapshot.FileDiff[]>(["session_diff", sessionID])).pipe(
|
||||
return yield* storage.read<Snapshot.FileDiff[]>(["session_diff", sessionID]).pipe(
|
||||
Effect.orElseSucceed((): Snapshot.FileDiff[] => []),
|
||||
)
|
||||
})
|
||||
@@ -660,7 +661,7 @@ export namespace Session {
|
||||
}),
|
||||
)
|
||||
|
||||
export const defaultLayer = layer.pipe(Layer.provide(Bus.layer))
|
||||
export const defaultLayer = layer.pipe(Layer.provide(Bus.layer), Layer.provide(Storage.defaultLayer))
|
||||
|
||||
const { runPromise } = makeRuntime(Service, defaultLayer)
|
||||
|
||||
|
||||
@@ -4,7 +4,6 @@ import { Global } from "../global"
|
||||
import { NamedError } from "@opencode-ai/util/error"
|
||||
import z from "zod"
|
||||
import { AppFileSystem } from "@/filesystem"
|
||||
import { makeRuntime } from "@/effect/run-service"
|
||||
import { Effect, Exit, Layer, Option, RcMap, Schema, ServiceMap, TxReentrantLock } from "effect"
|
||||
import { Git } from "@/git"
|
||||
|
||||
@@ -331,26 +330,4 @@ export namespace Storage {
|
||||
)
|
||||
|
||||
export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer), Layer.provide(Git.defaultLayer))
|
||||
|
||||
const { runPromise } = makeRuntime(Service, defaultLayer)
|
||||
|
||||
export async function remove(key: string[]) {
|
||||
return runPromise((svc) => svc.remove(key))
|
||||
}
|
||||
|
||||
export async function read<T>(key: string[]) {
|
||||
return runPromise((svc) => svc.read<T>(key))
|
||||
}
|
||||
|
||||
export async function update<T>(key: string[], fn: (draft: T) => void) {
|
||||
return runPromise((svc) => svc.update<T>(key, fn))
|
||||
}
|
||||
|
||||
export async function write<T>(key: string[], content: T) {
|
||||
return runPromise((svc) => svc.write(key, content))
|
||||
}
|
||||
|
||||
export async function list(prefix: string[]) {
|
||||
return runPromise((svc) => svc.list(prefix))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,6 +13,7 @@ import { Provider } from "../../src/provider/provider"
|
||||
import { Session } from "../../src/session"
|
||||
import type { SessionID } from "../../src/session/schema"
|
||||
import { ShareNext } from "../../src/share/share-next"
|
||||
import { Storage } from "../../src/storage/storage"
|
||||
import { SessionShareTable } from "../../src/share/share.sql"
|
||||
import { Database, eq } from "../../src/storage/db"
|
||||
import { provideTmpdirInstance } from "../fixture/fixture"
|
||||
@@ -55,7 +56,7 @@ function wired(client: HttpClient.HttpClient) {
|
||||
return Layer.mergeAll(
|
||||
Bus.layer,
|
||||
ShareNext.layer,
|
||||
Session.layer,
|
||||
Session.defaultLayer,
|
||||
AccountRepo.layer,
|
||||
NodeFileSystem.layer,
|
||||
CrossSpawnSpawner.defaultLayer,
|
||||
|
||||
@@ -1,296 +1,296 @@
|
||||
import { describe, expect, test } from "bun:test"
|
||||
import fs from "fs/promises"
|
||||
import { describe, expect } from "bun:test"
|
||||
import path from "path"
|
||||
import { Effect, Layer, ManagedRuntime } from "effect"
|
||||
import { Effect, Exit, Layer } from "effect"
|
||||
import { AppFileSystem } from "../../src/filesystem"
|
||||
import * as CrossSpawnSpawner from "../../src/effect/cross-spawn-spawner"
|
||||
import { Git } from "../../src/git"
|
||||
import { Global } from "../../src/global"
|
||||
import { Storage } from "../../src/storage/storage"
|
||||
import { tmpdir } from "../fixture/fixture"
|
||||
import { tmpdirScoped } from "../fixture/fixture"
|
||||
import { testEffect } from "../lib/effect"
|
||||
|
||||
const dir = path.join(Global.Path.data, "storage")
|
||||
|
||||
async function withScope<T>(fn: (root: string[]) => Promise<T>) {
|
||||
const root = ["storage_test", crypto.randomUUID()]
|
||||
try {
|
||||
return await fn(root)
|
||||
} finally {
|
||||
await fs.rm(path.join(dir, ...root), { recursive: true, force: true })
|
||||
}
|
||||
}
|
||||
const it = testEffect(
|
||||
Layer.mergeAll(Storage.defaultLayer, AppFileSystem.defaultLayer, CrossSpawnSpawner.defaultLayer),
|
||||
)
|
||||
|
||||
function map(root: string, file: string) {
|
||||
const scope = Effect.fnUntraced(function* () {
|
||||
const root = ["storage_test", crypto.randomUUID()]
|
||||
const fs = yield* AppFileSystem.Service
|
||||
const svc = yield* Storage.Service
|
||||
yield* Effect.addFinalizer(() =>
|
||||
fs.remove(path.join(dir, ...root), { recursive: true, force: true }).pipe(Effect.ignore),
|
||||
)
|
||||
return { root, svc }
|
||||
})
|
||||
|
||||
// remap(root) rewrites any path under Global.Path.data to live under `root` instead.
|
||||
// Used by remappedFs to build an AppFileSystem that Storage thinks is the real global
|
||||
// data dir but actually targets a tmp dir — letting migration tests stage legacy layouts.
|
||||
// NOTE: only the 6 methods below are intercepted. If Storage starts using a different
|
||||
// AppFileSystem method that touches Global.Path.data, add it here.
|
||||
function remap(root: string, file: string) {
|
||||
if (file === Global.Path.data) return root
|
||||
if (file.startsWith(Global.Path.data + path.sep)) return path.join(root, path.relative(Global.Path.data, file))
|
||||
return file
|
||||
}
|
||||
|
||||
function layer(root: string) {
|
||||
function remappedFs(root: string) {
|
||||
return Layer.effect(
|
||||
AppFileSystem.Service,
|
||||
Effect.gen(function* () {
|
||||
const fs = yield* AppFileSystem.Service
|
||||
return AppFileSystem.Service.of({
|
||||
...fs,
|
||||
isDir: (file) => fs.isDir(map(root, file)),
|
||||
readJson: (file) => fs.readJson(map(root, file)),
|
||||
writeWithDirs: (file, content, mode) => fs.writeWithDirs(map(root, file), content, mode),
|
||||
readFileString: (file) => fs.readFileString(map(root, file)),
|
||||
remove: (file) => fs.remove(map(root, file)),
|
||||
isDir: (file) => fs.isDir(remap(root, file)),
|
||||
readJson: (file) => fs.readJson(remap(root, file)),
|
||||
writeWithDirs: (file, content, mode) => fs.writeWithDirs(remap(root, file), content, mode),
|
||||
readFileString: (file) => fs.readFileString(remap(root, file)),
|
||||
remove: (file) => fs.remove(remap(root, file)),
|
||||
glob: (pattern, options) =>
|
||||
fs.glob(pattern, options?.cwd ? { ...options, cwd: map(root, options.cwd) } : options),
|
||||
fs.glob(pattern, options?.cwd ? { ...options, cwd: remap(root, options.cwd) } : options),
|
||||
})
|
||||
}),
|
||||
).pipe(Layer.provide(AppFileSystem.defaultLayer))
|
||||
}
|
||||
|
||||
async function withStorage<T>(
|
||||
root: string,
|
||||
fn: (run: <A, E>(body: Effect.Effect<A, E, Storage.Service>) => Promise<A>) => Promise<T>,
|
||||
) {
|
||||
const rt = ManagedRuntime.make(Storage.layer.pipe(Layer.provide(layer(root)), Layer.provide(Git.defaultLayer)))
|
||||
try {
|
||||
return await fn((body) => rt.runPromise(body))
|
||||
} finally {
|
||||
await rt.dispose()
|
||||
}
|
||||
}
|
||||
|
||||
async function write(file: string, value: unknown) {
|
||||
await fs.mkdir(path.dirname(file), { recursive: true })
|
||||
await Bun.write(file, JSON.stringify(value, null, 2))
|
||||
}
|
||||
|
||||
async function text(file: string, value: string) {
|
||||
await fs.mkdir(path.dirname(file), { recursive: true })
|
||||
await Bun.write(file, value)
|
||||
}
|
||||
|
||||
async function exists(file: string) {
|
||||
return fs
|
||||
.stat(file)
|
||||
.then(() => true)
|
||||
.catch(() => false)
|
||||
}
|
||||
// Layer.fresh forces a new Storage instance — without it, Effect's in-test layer cache
|
||||
// returns the outer testEffect's Storage (which uses the real AppFileSystem), not a new
|
||||
// one built on top of remappedFs.
|
||||
const remappedStorage = (root: string) =>
|
||||
Layer.fresh(Storage.layer.pipe(Layer.provide(remappedFs(root)), Layer.provide(Git.defaultLayer)))
|
||||
|
||||
describe("Storage", () => {
|
||||
test("round-trips JSON content", async () => {
|
||||
await withScope(async (root) => {
|
||||
it.live("round-trips JSON content", () =>
|
||||
Effect.gen(function* () {
|
||||
const { root, svc } = yield* scope()
|
||||
const key = [...root, "session_diff", "roundtrip"]
|
||||
const value = [{ file: "a.ts", additions: 2, deletions: 1 }]
|
||||
|
||||
await Storage.write(key, value)
|
||||
yield* svc.write(key, value)
|
||||
expect(yield* svc.read<typeof value>(key)).toEqual(value)
|
||||
}),
|
||||
)
|
||||
|
||||
expect(await Storage.read<typeof value>(key)).toEqual(value)
|
||||
})
|
||||
})
|
||||
it.live("maps missing reads to NotFoundError", () =>
|
||||
Effect.gen(function* () {
|
||||
const { root, svc } = yield* scope()
|
||||
const exit = yield* svc.read([...root, "missing", "value"]).pipe(Effect.exit)
|
||||
expect(Exit.isFailure(exit)).toBe(true)
|
||||
}),
|
||||
)
|
||||
|
||||
test("maps missing reads to NotFoundError", async () => {
|
||||
await withScope(async (root) => {
|
||||
await expect(Storage.read([...root, "missing", "value"])).rejects.toMatchObject({ name: "NotFoundError" })
|
||||
})
|
||||
})
|
||||
|
||||
test("update on missing key throws NotFoundError", async () => {
|
||||
await withScope(async (root) => {
|
||||
await expect(
|
||||
Storage.update<{ value: number }>([...root, "missing", "key"], (draft) => {
|
||||
it.live("update on missing key throws NotFoundError", () =>
|
||||
Effect.gen(function* () {
|
||||
const { root, svc } = yield* scope()
|
||||
const exit = yield* svc
|
||||
.update<{ value: number }>([...root, "missing", "key"], (draft) => {
|
||||
draft.value += 1
|
||||
}),
|
||||
).rejects.toMatchObject({ name: "NotFoundError" })
|
||||
})
|
||||
})
|
||||
})
|
||||
.pipe(Effect.exit)
|
||||
expect(Exit.isFailure(exit)).toBe(true)
|
||||
}),
|
||||
)
|
||||
|
||||
test("write overwrites existing value", async () => {
|
||||
await withScope(async (root) => {
|
||||
it.live("write overwrites existing value", () =>
|
||||
Effect.gen(function* () {
|
||||
const { root, svc } = yield* scope()
|
||||
const key = [...root, "overwrite", "test"]
|
||||
await Storage.write<{ v: number }>(key, { v: 1 })
|
||||
await Storage.write<{ v: number }>(key, { v: 2 })
|
||||
|
||||
expect(await Storage.read<{ v: number }>(key)).toEqual({ v: 2 })
|
||||
})
|
||||
})
|
||||
yield* svc.write<{ v: number }>(key, { v: 1 })
|
||||
yield* svc.write<{ v: number }>(key, { v: 2 })
|
||||
|
||||
test("remove on missing key is a no-op", async () => {
|
||||
await withScope(async (root) => {
|
||||
await expect(Storage.remove([...root, "nonexistent", "key"])).resolves.toBeUndefined()
|
||||
})
|
||||
})
|
||||
expect(yield* svc.read<{ v: number }>(key)).toEqual({ v: 2 })
|
||||
}),
|
||||
)
|
||||
|
||||
test("list on missing prefix returns empty", async () => {
|
||||
await withScope(async (root) => {
|
||||
expect(await Storage.list([...root, "nonexistent"])).toEqual([])
|
||||
})
|
||||
})
|
||||
it.live("remove on missing key is a no-op", () =>
|
||||
Effect.gen(function* () {
|
||||
const { root, svc } = yield* scope()
|
||||
yield* svc.remove([...root, "nonexistent", "key"])
|
||||
}),
|
||||
)
|
||||
|
||||
test("serializes concurrent updates for the same key", async () => {
|
||||
await withScope(async (root) => {
|
||||
it.live("list on missing prefix returns empty", () =>
|
||||
Effect.gen(function* () {
|
||||
const { root, svc } = yield* scope()
|
||||
expect(yield* svc.list([...root, "nonexistent"])).toEqual([])
|
||||
}),
|
||||
)
|
||||
|
||||
it.live("serializes concurrent updates for the same key", () =>
|
||||
Effect.gen(function* () {
|
||||
const { root, svc } = yield* scope()
|
||||
const key = [...root, "counter", "shared"]
|
||||
await Storage.write(key, { value: 0 })
|
||||
|
||||
await Promise.all(
|
||||
yield* svc.write(key, { value: 0 })
|
||||
|
||||
yield* Effect.all(
|
||||
Array.from({ length: 25 }, () =>
|
||||
Storage.update<{ value: number }>(key, (draft) => {
|
||||
svc.update<{ value: number }>(key, (draft) => {
|
||||
draft.value += 1
|
||||
}),
|
||||
),
|
||||
{ concurrency: "unbounded" },
|
||||
)
|
||||
|
||||
expect(await Storage.read<{ value: number }>(key)).toEqual({ value: 25 })
|
||||
})
|
||||
})
|
||||
expect(yield* svc.read<{ value: number }>(key)).toEqual({ value: 25 })
|
||||
}),
|
||||
)
|
||||
|
||||
test("concurrent reads do not block each other", async () => {
|
||||
await withScope(async (root) => {
|
||||
it.live("concurrent reads do not block each other", () =>
|
||||
Effect.gen(function* () {
|
||||
const { root, svc } = yield* scope()
|
||||
const key = [...root, "concurrent", "reads"]
|
||||
await Storage.write(key, { ok: true })
|
||||
|
||||
const results = await Promise.all(Array.from({ length: 10 }, () => Storage.read(key)))
|
||||
yield* svc.write(key, { ok: true })
|
||||
|
||||
const results = yield* Effect.all(
|
||||
Array.from({ length: 10 }, () => svc.read(key)),
|
||||
{ concurrency: "unbounded" },
|
||||
)
|
||||
|
||||
expect(results).toHaveLength(10)
|
||||
for (const r of results) expect(r).toEqual({ ok: true })
|
||||
})
|
||||
})
|
||||
}),
|
||||
)
|
||||
|
||||
test("nested keys create deep paths", async () => {
|
||||
await withScope(async (root) => {
|
||||
it.live("nested keys create deep paths", () =>
|
||||
Effect.gen(function* () {
|
||||
const { root, svc } = yield* scope()
|
||||
const key = [...root, "a", "b", "c", "deep"]
|
||||
await Storage.write<{ nested: boolean }>(key, { nested: true })
|
||||
|
||||
expect(await Storage.read<{ nested: boolean }>(key)).toEqual({ nested: true })
|
||||
expect(await Storage.list([...root, "a"])).toEqual([key])
|
||||
})
|
||||
})
|
||||
yield* svc.write<{ nested: boolean }>(key, { nested: true })
|
||||
|
||||
test("lists and removes stored entries", async () => {
|
||||
await withScope(async (root) => {
|
||||
expect(yield* svc.read<{ nested: boolean }>(key)).toEqual({ nested: true })
|
||||
expect(yield* svc.list([...root, "a"])).toEqual([key])
|
||||
}),
|
||||
)
|
||||
|
||||
it.live("lists and removes stored entries", () =>
|
||||
Effect.gen(function* () {
|
||||
const { root, svc } = yield* scope()
|
||||
const a = [...root, "list", "a"]
|
||||
const b = [...root, "list", "b"]
|
||||
const prefix = [...root, "list"]
|
||||
|
||||
await Storage.write(b, { value: 2 })
|
||||
await Storage.write(a, { value: 1 })
|
||||
yield* svc.write(b, { value: 2 })
|
||||
yield* svc.write(a, { value: 1 })
|
||||
|
||||
expect(await Storage.list(prefix)).toEqual([a, b])
|
||||
expect(yield* svc.list(prefix)).toEqual([a, b])
|
||||
|
||||
await Storage.remove(a)
|
||||
yield* svc.remove(a)
|
||||
|
||||
expect(await Storage.list(prefix)).toEqual([b])
|
||||
await expect(Storage.read(a)).rejects.toMatchObject({ name: "NotFoundError" })
|
||||
})
|
||||
})
|
||||
expect(yield* svc.list(prefix)).toEqual([b])
|
||||
const exit = yield* svc.read(a).pipe(Effect.exit)
|
||||
expect(Exit.isFailure(exit)).toBe(true)
|
||||
}),
|
||||
)
|
||||
|
||||
test("migration 2 runs when marker contents are invalid", async () => {
|
||||
await using tmp = await tmpdir()
|
||||
const storage = path.join(tmp.path, "storage")
|
||||
const diffs = [
|
||||
{ additions: 2, deletions: 1 },
|
||||
{ additions: 3, deletions: 4 },
|
||||
]
|
||||
it.live("migration 2 runs when marker contents are invalid", () =>
|
||||
Effect.gen(function* () {
|
||||
const fs = yield* AppFileSystem.Service
|
||||
const tmp = yield* tmpdirScoped()
|
||||
const storage = path.join(tmp, "storage")
|
||||
const diffs = [
|
||||
{ additions: 2, deletions: 1 },
|
||||
{ additions: 3, deletions: 4 },
|
||||
]
|
||||
|
||||
await text(path.join(storage, "migration"), "wat")
|
||||
await write(path.join(storage, "session", "proj_test", "ses_test.json"), {
|
||||
id: "ses_test",
|
||||
projectID: "proj_test",
|
||||
title: "legacy",
|
||||
summary: { diffs },
|
||||
})
|
||||
|
||||
await withStorage(tmp.path, async (run) => {
|
||||
expect(await run(Storage.Service.use((svc) => svc.list(["session_diff"])))).toEqual([
|
||||
["session_diff", "ses_test"],
|
||||
])
|
||||
expect(await run(Storage.Service.use((svc) => svc.read<typeof diffs>(["session_diff", "ses_test"])))).toEqual(
|
||||
diffs,
|
||||
yield* fs.writeWithDirs(path.join(storage, "migration"), "wat")
|
||||
yield* fs.writeWithDirs(
|
||||
path.join(storage, "session", "proj_test", "ses_test.json"),
|
||||
JSON.stringify({
|
||||
id: "ses_test",
|
||||
projectID: "proj_test",
|
||||
title: "legacy",
|
||||
summary: { diffs },
|
||||
}),
|
||||
)
|
||||
expect(
|
||||
await run(
|
||||
Storage.Service.use((svc) =>
|
||||
svc.read<{
|
||||
id: string
|
||||
projectID: string
|
||||
title: string
|
||||
summary: {
|
||||
additions: number
|
||||
deletions: number
|
||||
}
|
||||
}>(["session", "proj_test", "ses_test"]),
|
||||
),
|
||||
),
|
||||
).toEqual({
|
||||
id: "ses_test",
|
||||
projectID: "proj_test",
|
||||
title: "legacy",
|
||||
summary: {
|
||||
additions: 5,
|
||||
deletions: 5,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
expect(await Bun.file(path.join(storage, "migration")).text()).toBe("2")
|
||||
})
|
||||
yield* Effect.gen(function* () {
|
||||
const svc = yield* Storage.Service
|
||||
expect(yield* svc.list(["session_diff"])).toEqual([["session_diff", "ses_test"]])
|
||||
expect(yield* svc.read<typeof diffs>(["session_diff", "ses_test"])).toEqual(diffs)
|
||||
expect(
|
||||
yield* svc.read<{
|
||||
id: string
|
||||
projectID: string
|
||||
title: string
|
||||
summary: { additions: number; deletions: number }
|
||||
}>(["session", "proj_test", "ses_test"]),
|
||||
).toEqual({
|
||||
id: "ses_test",
|
||||
projectID: "proj_test",
|
||||
title: "legacy",
|
||||
summary: { additions: 5, deletions: 5 },
|
||||
})
|
||||
}).pipe(Effect.provide(remappedStorage(tmp)))
|
||||
|
||||
test("migration 1 tolerates malformed legacy records", async () => {
|
||||
await using tmp = await tmpdir({ git: true })
|
||||
const storage = path.join(tmp.path, "storage")
|
||||
const legacy = path.join(tmp.path, "project", "legacy")
|
||||
expect(yield* fs.readFileString(path.join(storage, "migration"))).toBe("2")
|
||||
}),
|
||||
)
|
||||
|
||||
await write(path.join(legacy, "storage", "session", "message", "probe", "0.json"), [])
|
||||
await write(path.join(legacy, "storage", "session", "message", "probe", "1.json"), {
|
||||
path: { root: tmp.path },
|
||||
})
|
||||
await write(path.join(legacy, "storage", "session", "info", "ses_legacy.json"), {
|
||||
id: "ses_legacy",
|
||||
title: "legacy",
|
||||
})
|
||||
await write(path.join(legacy, "storage", "session", "message", "ses_legacy", "msg_legacy.json"), {
|
||||
role: "user",
|
||||
text: "hello",
|
||||
})
|
||||
it.live("migration 1 tolerates malformed legacy records", () =>
|
||||
Effect.gen(function* () {
|
||||
const fs = yield* AppFileSystem.Service
|
||||
const tmp = yield* tmpdirScoped({ git: true })
|
||||
const storage = path.join(tmp, "storage")
|
||||
const legacy = path.join(tmp, "project", "legacy")
|
||||
|
||||
await withStorage(tmp.path, async (run) => {
|
||||
const projects = await run(Storage.Service.use((svc) => svc.list(["project"])))
|
||||
expect(projects).toHaveLength(1)
|
||||
const project = projects[0]![1]
|
||||
yield* fs.writeWithDirs(path.join(legacy, "storage", "session", "message", "probe", "0.json"), "[]")
|
||||
yield* fs.writeWithDirs(
|
||||
path.join(legacy, "storage", "session", "message", "probe", "1.json"),
|
||||
JSON.stringify({ path: { root: tmp } }),
|
||||
)
|
||||
yield* fs.writeWithDirs(
|
||||
path.join(legacy, "storage", "session", "info", "ses_legacy.json"),
|
||||
JSON.stringify({ id: "ses_legacy", title: "legacy" }),
|
||||
)
|
||||
yield* fs.writeWithDirs(
|
||||
path.join(legacy, "storage", "session", "message", "ses_legacy", "msg_legacy.json"),
|
||||
JSON.stringify({ role: "user", text: "hello" }),
|
||||
)
|
||||
|
||||
expect(await run(Storage.Service.use((svc) => svc.list(["session", project])))).toEqual([
|
||||
["session", project, "ses_legacy"],
|
||||
])
|
||||
expect(
|
||||
await run(
|
||||
Storage.Service.use((svc) => svc.read<{ id: string; title: string }>(["session", project, "ses_legacy"])),
|
||||
),
|
||||
).toEqual({
|
||||
id: "ses_legacy",
|
||||
title: "legacy",
|
||||
})
|
||||
expect(
|
||||
await run(
|
||||
Storage.Service.use((svc) =>
|
||||
svc.read<{ role: string; text: string }>(["message", "ses_legacy", "msg_legacy"]),
|
||||
),
|
||||
),
|
||||
).toEqual({
|
||||
role: "user",
|
||||
text: "hello",
|
||||
})
|
||||
})
|
||||
yield* Effect.gen(function* () {
|
||||
const svc = yield* Storage.Service
|
||||
const projects = yield* svc.list(["project"])
|
||||
expect(projects).toHaveLength(1)
|
||||
const project = projects[0]![1]
|
||||
|
||||
expect(await Bun.file(path.join(storage, "migration")).text()).toBe("2")
|
||||
})
|
||||
expect(yield* svc.list(["session", project])).toEqual([["session", project, "ses_legacy"]])
|
||||
expect(yield* svc.read<{ id: string; title: string }>(["session", project, "ses_legacy"])).toEqual({
|
||||
id: "ses_legacy",
|
||||
title: "legacy",
|
||||
})
|
||||
expect(yield* svc.read<{ role: string; text: string }>(["message", "ses_legacy", "msg_legacy"])).toEqual({
|
||||
role: "user",
|
||||
text: "hello",
|
||||
})
|
||||
}).pipe(Effect.provide(remappedStorage(tmp)))
|
||||
|
||||
test("failed migrations do not advance the marker", async () => {
|
||||
await using tmp = await tmpdir()
|
||||
const storage = path.join(tmp.path, "storage")
|
||||
const legacy = path.join(tmp.path, "project", "legacy")
|
||||
expect(yield* fs.readFileString(path.join(storage, "migration"))).toBe("2")
|
||||
}),
|
||||
)
|
||||
|
||||
await text(path.join(legacy, "storage", "session", "message", "probe", "0.json"), "{")
|
||||
it.live("failed migrations do not advance the marker", () =>
|
||||
Effect.gen(function* () {
|
||||
const fs = yield* AppFileSystem.Service
|
||||
const tmp = yield* tmpdirScoped()
|
||||
const storage = path.join(tmp, "storage")
|
||||
const legacy = path.join(tmp, "project", "legacy")
|
||||
|
||||
await withStorage(tmp.path, async (run) => {
|
||||
expect(await run(Storage.Service.use((svc) => svc.list(["project"])))).toEqual([])
|
||||
})
|
||||
yield* fs.writeWithDirs(path.join(legacy, "storage", "session", "message", "probe", "0.json"), "{")
|
||||
|
||||
expect(await exists(path.join(storage, "migration"))).toBe(false)
|
||||
})
|
||||
yield* Effect.gen(function* () {
|
||||
const svc = yield* Storage.Service
|
||||
expect(yield* svc.list(["project"])).toEqual([])
|
||||
}).pipe(Effect.provide(remappedStorage(tmp)))
|
||||
|
||||
const exit = yield* fs.access(path.join(storage, "migration")).pipe(Effect.exit)
|
||||
expect(Exit.isFailure(exit)).toBe(true)
|
||||
}),
|
||||
)
|
||||
})
|
||||
|
||||
|
||||
Reference in New Issue
Block a user