mirror of
https://fastgit.cc/https://github.com/anomalyco/opencode
synced 2026-04-21 05:10:58 +08:00
fix: narrow several `any` type assertions in opencode core (#22926)
This commit is contained in:
@@ -12,7 +12,7 @@ export const { use: useKV, provider: KVProvider } = createSimpleContext({
|
||||
const [store, setStore] = createStore<Record<string, any>>()
|
||||
const filePath = path.join(Global.Path.state, "kv.json")
|
||||
|
||||
Filesystem.readJson(filePath)
|
||||
Filesystem.readJson<Record<string, any>>(filePath)
|
||||
.then((x) => {
|
||||
setStore(x)
|
||||
})
|
||||
|
||||
@@ -28,10 +28,10 @@ export function FormatError(input: unknown) {
|
||||
// ProviderModelNotFoundError: { providerID: string, modelID: string, suggestions?: string[] }
|
||||
if (NamedError.hasName(input, "ProviderModelNotFoundError")) {
|
||||
const data = (input as ErrorLike).data
|
||||
const suggestions = data?.suggestions as string[] | undefined
|
||||
const suggestions: string[] = Array.isArray(data?.suggestions) ? data.suggestions : []
|
||||
return [
|
||||
`Model not found: ${data?.providerID}/${data?.modelID}`,
|
||||
...(Array.isArray(suggestions) && suggestions.length ? ["Did you mean: " + suggestions.join(", ")] : []),
|
||||
...(suggestions.length ? ["Did you mean: " + suggestions.join(", ")] : []),
|
||||
`Try: \`opencode models\` to list available models`,
|
||||
`Or check your config (opencode.json) provider/model names`,
|
||||
].join("\n")
|
||||
@@ -64,10 +64,10 @@ export function FormatError(input: unknown) {
|
||||
const data = (input as ErrorLike).data
|
||||
const path = data?.path
|
||||
const message = data?.message
|
||||
const issues = data?.issues as Array<{ message: string; path: string[] }> | undefined
|
||||
const issues: Array<{ message: string; path: string[] }> = Array.isArray(data?.issues) ? data.issues : []
|
||||
return [
|
||||
`Configuration is invalid${path && path !== "config" ? ` at ${path}` : ""}` + (message ? `: ${message}` : ""),
|
||||
...(issues?.map((issue) => "↳ " + issue.message + " " + issue.path.join(".")) ?? []),
|
||||
...issues.map((issue) => "↳ " + issue.message + " " + issue.path.join(".")),
|
||||
].join("\n")
|
||||
}
|
||||
|
||||
|
||||
@@ -440,12 +440,11 @@ export const layer = Layer.effect(
|
||||
const workspaceSymbol = Effect.fn("LSP.workspaceSymbol")(function* (query: string) {
|
||||
const results = yield* runAll((client) =>
|
||||
client.connection
|
||||
.sendRequest("workspace/symbol", { query })
|
||||
.then((result: any) => result.filter((x: Symbol) => kinds.includes(x.kind)))
|
||||
.then((result: any) => result.slice(0, 10))
|
||||
.catch(() => []),
|
||||
.sendRequest<Symbol[]>("workspace/symbol", { query })
|
||||
.then((result) => result.filter((x) => kinds.includes(x.kind)).slice(0, 10))
|
||||
.catch(() => [] as Symbol[]),
|
||||
)
|
||||
return results.flat() as Symbol[]
|
||||
return results.flat()
|
||||
})
|
||||
|
||||
const prepareCallHierarchy = Effect.fn("LSP.prepareCallHierarchy")(function* (input: LocInput) {
|
||||
|
||||
@@ -124,8 +124,17 @@ export async function install(dir: string) {
|
||||
return
|
||||
}
|
||||
|
||||
const pkg = await Filesystem.readJson(path.join(dir, "package.json")).catch(() => ({}))
|
||||
const lock = await Filesystem.readJson(path.join(dir, "package-lock.json")).catch(() => ({}))
|
||||
type PackageDeps = Record<string, string>
|
||||
type PackageJson = {
|
||||
dependencies?: PackageDeps
|
||||
devDependencies?: PackageDeps
|
||||
peerDependencies?: PackageDeps
|
||||
optionalDependencies?: PackageDeps
|
||||
}
|
||||
const pkg: PackageJson = await Filesystem.readJson<PackageJson>(path.join(dir, "package.json")).catch(() => ({}))
|
||||
const lock: { packages?: Record<string, PackageJson> } = await Filesystem.readJson<{
|
||||
packages?: Record<string, PackageJson>
|
||||
}>(path.join(dir, "package-lock.json")).catch(() => ({}))
|
||||
|
||||
const declared = new Set([
|
||||
...Object.keys(pkg.dependencies || {}),
|
||||
|
||||
@@ -547,12 +547,14 @@ function custom(dep: CustomDep): Record<string, CustomLoader> {
|
||||
},
|
||||
async getModel(sdk: any, modelID: string, options?: Record<string, any>) {
|
||||
if (modelID.startsWith("duo-workflow-")) {
|
||||
const workflowRef = options?.workflowRef as string | undefined
|
||||
const workflowRef = typeof options?.workflowRef === "string" ? options.workflowRef : undefined
|
||||
// Use the static mapping if it exists, otherwise use duo-workflow with selectedModelRef
|
||||
const sdkModelID = isWorkflowModel(modelID) ? modelID : "duo-workflow"
|
||||
const workflowDefinition =
|
||||
typeof options?.workflowDefinition === "string" ? options.workflowDefinition : undefined
|
||||
const model = sdk.workflowChat(sdkModelID, {
|
||||
featureFlags,
|
||||
workflowDefinition: options?.workflowDefinition as string | undefined,
|
||||
workflowDefinition,
|
||||
})
|
||||
if (workflowRef) {
|
||||
model.selectedModelRef = workflowRef
|
||||
|
||||
@@ -272,16 +272,18 @@ export const getUsage = (input: { model: Provider.Model; usage: LanguageModelUsa
|
||||
input.usage.inputTokenDetails?.cacheReadTokens ?? input.usage.cachedInputTokens ?? 0,
|
||||
)
|
||||
const cacheWriteInputTokens = safe(
|
||||
(input.usage.inputTokenDetails?.cacheWriteTokens ??
|
||||
input.metadata?.["anthropic"]?.["cacheCreationInputTokens"] ??
|
||||
// google-vertex-anthropic returns metadata under "vertex" key
|
||||
// (AnthropicMessagesLanguageModel custom provider key from 'vertex.anthropic.messages')
|
||||
input.metadata?.["vertex"]?.["cacheCreationInputTokens"] ??
|
||||
// @ts-expect-error
|
||||
input.metadata?.["bedrock"]?.["usage"]?.["cacheWriteInputTokens"] ??
|
||||
// @ts-expect-error
|
||||
input.metadata?.["venice"]?.["usage"]?.["cacheCreationInputTokens"] ??
|
||||
0) as number,
|
||||
Number(
|
||||
input.usage.inputTokenDetails?.cacheWriteTokens ??
|
||||
input.metadata?.["anthropic"]?.["cacheCreationInputTokens"] ??
|
||||
// google-vertex-anthropic returns metadata under "vertex" key
|
||||
// (AnthropicMessagesLanguageModel custom provider key from 'vertex.anthropic.messages')
|
||||
input.metadata?.["vertex"]?.["cacheCreationInputTokens"] ??
|
||||
// @ts-expect-error
|
||||
input.metadata?.["bedrock"]?.["usage"]?.["cacheWriteInputTokens"] ??
|
||||
// @ts-expect-error
|
||||
input.metadata?.["venice"]?.["usage"]?.["cacheCreationInputTokens"] ??
|
||||
0,
|
||||
),
|
||||
)
|
||||
|
||||
// AI SDK v6 normalized inputTokens to include cached tokens across all providers
|
||||
|
||||
@@ -19,7 +19,7 @@ export type Context<M extends Metadata = Metadata> = {
|
||||
agent: string
|
||||
abort: AbortSignal
|
||||
callID?: string
|
||||
extra?: { [key: string]: any }
|
||||
extra?: { [key: string]: unknown }
|
||||
messages: MessageV2.WithParts[]
|
||||
metadata(input: { title?: string; metadata?: M }): Effect.Effect<void>
|
||||
ask(input: Omit<Permission.Request, "id" | "sessionID" | "tool">): Effect.Effect<void>
|
||||
|
||||
@@ -39,7 +39,7 @@ export async function readText(p: string): Promise<string> {
|
||||
return readFile(p, "utf-8")
|
||||
}
|
||||
|
||||
export async function readJson<T = any>(p: string): Promise<T> {
|
||||
export async function readJson<T = unknown>(p: string): Promise<T> {
|
||||
return JSON.parse(await readFile(p, "utf-8"))
|
||||
}
|
||||
|
||||
|
||||
@@ -757,7 +757,7 @@ test("updates config and writes to file", async () => {
|
||||
const newConfig = { model: "updated/model" }
|
||||
await save(newConfig as any)
|
||||
|
||||
const writtenConfig = await Filesystem.readJson(path.join(tmp.path, "config.json"))
|
||||
const writtenConfig = await Filesystem.readJson<{ model: string }>(path.join(tmp.path, "config.json"))
|
||||
expect(writtenConfig.model).toBe("updated/model")
|
||||
},
|
||||
})
|
||||
|
||||
Reference in New Issue
Block a user