feat(deepseek): support v4 models

Add DeepSeek V4 Flash/Pro support, update Pi packages to 0.70.2, and handle disabled thinking/None by stripping replayed reasoning content.
This commit is contained in:
lsdsjy
2026-04-24 23:09:36 +08:00
committed by GitHub
parent 4f4288e3b5
commit 7d1891e6e6
27 changed files with 429 additions and 129 deletions

View File

@@ -21,7 +21,7 @@ Docs: https://docs.openclaw.ai
- Agents/subagents: add optional forked context for native `sessions_spawn` runs so agents can let a child inherit the requester transcript when needed, while keeping clean isolated sessions as the default; includes prompt guidance, context-engine hook metadata, docs, and QA coverage.
- Codex harness: add structured debug logging for embedded harness selection decisions so `/status` stays simple while gateway logs explain auto-selection and Pi fallback reasons. (#70760) Thanks @100yenadmin.
- Plugin SDK/Codex harness: add provider-owned transport/auth/follow-up seams and harness result classification so Codex-style runtimes can participate in fallback policy without core special-casing. (#70772) Thanks @100yenadmin.
- Dependencies/Pi: update bundled Pi packages to `0.70.0`, use Pi's upstream `gpt-5.5` catalog metadata for OpenAI and OpenAI Codex, and keep only local `gpt-5.5-pro` forward-compat handling.
- Dependencies/Pi: update bundled Pi packages to `0.70.2`, use Pi's upstream `gpt-5.5` and DeepSeek V4 catalog metadata, and keep only local `gpt-5.5-pro` forward-compat handling.
- Models/CLI: speed up `openclaw models list --all --provider <id>` for bundled providers with safe static catalogs while keeping live and third-party providers on registry discovery. (#70632) Thanks @shakkernerd.
- Models/CLI: avoid broad registry enumeration for default `openclaw models list`, reducing default listing latency while preserving configured-row output. (#70883) Thanks @shakkernerd.
- Models/CLI: split `openclaw models list` row-source orchestration and registry loading into narrower helpers without changing list output behavior. (#70867) Thanks @shakkernerd.
@@ -35,6 +35,7 @@ Docs: https://docs.openclaw.ai
- Providers/OpenRouter: add image generation and reference-image editing through `image_generate`, so OpenRouter image models work with `OPENROUTER_API_KEY`. Fixes #55066 via #67668. Thanks @notamicrodose.
- Image generation: let agents request provider-supported quality and output format hints, and pass OpenAI-specific background, moderation, compression, and user hints through the `image_generate` tool. (#70503) Thanks @ottodeng.
- Plugins/Google Meet: let realtime Meet sessions consult the full OpenClaw agent for deeper answers while staying in the live voice loop.
- Providers/DeepSeek: add DeepSeek V4 Flash and V4 Pro to the bundled catalog and make V4 Flash the onboarding default.
### Fixes
@@ -42,6 +43,7 @@ Docs: https://docs.openclaw.ai
- Browser/tool: tell agents not to pass per-call `timeoutMs` on existing-session type, evaluate, and other Chrome MCP actions that reject timeout overrides.
- Codex/GPT-5.4: harden fallback, auth-profile, tool-schema, and replay edge cases across native and embedded runtime paths. (#70743) Thanks @100yenadmin.
- Voice-call/Telnyx: preserve inbound/outbound callback metadata and read transcription text from Telnyx's current `transcription_data` payload.
- Providers/DeepSeek: wire V4 thinking controls and OpenAI-compatible replay policy so follow-up turns preserve DeepSeek `reasoning_content`, while the None/off thinking path strips replayed reasoning fields. Fixes #70931.
- Codex harness: send verbose tool progress to chat channels for native app-server runs, matching the Pi harness `/verbose on` and `/verbose full` behavior. (#70966) Thanks @jalehman.
- Codex models: fetch paginated Codex app-server model catalogs, mark truncated `/codex models` output, and keep ChatGPT OAuth defaults on the `openai-codex/gpt-5.5` route instead of the OpenAI API-key route.
- Codex harness: route native `request_user_input` prompts back to the originating chat, preserve queued follow-up answers, and honor newer app-server command approval amendment decisions.

View File

@@ -235,6 +235,7 @@ See [/providers/kilocode](/providers/kilocode) for setup details.
| BytePlus | `byteplus` / `byteplus-plan` | `BYTEPLUS_API_KEY` | `byteplus-plan/ark-code-latest` |
| Cerebras | `cerebras` | `CEREBRAS_API_KEY` | `cerebras/zai-glm-4.7` |
| Cloudflare AI Gateway | `cloudflare-ai-gateway` | `CLOUDFLARE_AI_GATEWAY_API_KEY` | — |
| DeepSeek | `deepseek` | `DEEPSEEK_API_KEY` | `deepseek/deepseek-v4-flash` |
| GitHub Copilot | `github-copilot` | `COPILOT_GITHUB_TOKEN` / `GH_TOKEN` / `GITHUB_TOKEN` | — |
| Groq | `groq` | `GROQ_API_KEY` | — |
| Hugging Face Inference | `huggingface` | `HUGGINGFACE_HUB_TOKEN` or `HF_TOKEN` | `huggingface/deepseek-ai/DeepSeek-R1` |

View File

@@ -23,10 +23,10 @@ OpenClaw uses the pi SDK to embed an AI coding agent into its messaging gateway
```json
{
"@mariozechner/pi-agent-core": "0.68.1",
"@mariozechner/pi-ai": "0.68.1",
"@mariozechner/pi-coding-agent": "0.68.1",
"@mariozechner/pi-tui": "0.68.1"
"@mariozechner/pi-agent-core": "0.70.2",
"@mariozechner/pi-ai": "0.70.2",
"@mariozechner/pi-coding-agent": "0.70.2",
"@mariozechner/pi-tui": "0.70.2"
}
```

View File

@@ -26,7 +26,7 @@ read_when:
openclaw onboard --auth-choice deepseek-api-key
```
This will prompt for your API key and set `deepseek/deepseek-chat` as the default model.
This will prompt for your API key and set `deepseek/deepseek-v4-flash` as the default model.
</Step>
<Step title="Verify models are available">
@@ -60,13 +60,17 @@ is available to that process (for example, in `~/.openclaw/.env` or via
## Built-in catalog
| Model ref | Name | Input | Context | Max output | Notes |
| ---------------------------- | ----------------- | ----- | ------- | ---------- | ------------------------------------------------- |
| `deepseek/deepseek-chat` | DeepSeek Chat | text | 131,072 | 8,192 | Default model; DeepSeek V3.2 non-thinking surface |
| `deepseek/deepseek-reasoner` | DeepSeek Reasoner | text | 131,072 | 65,536 | Reasoning-enabled V3.2 surface |
| Model ref | Name | Input | Context | Max output | Notes |
| ---------------------------- | ----------------- | ----- | --------- | ---------- | ------------------------------------------ |
| `deepseek/deepseek-v4-flash` | DeepSeek V4 Flash | text | 1,000,000 | 384,000 | Default model; V4 thinking-capable surface |
| `deepseek/deepseek-v4-pro` | DeepSeek V4 Pro | text | 1,000,000 | 384,000 | V4 thinking-capable surface |
| `deepseek/deepseek-chat` | DeepSeek Chat | text | 131,072 | 8,192 | DeepSeek V3.2 non-thinking surface |
| `deepseek/deepseek-reasoner` | DeepSeek Reasoner | text | 131,072 | 65,536 | Reasoning-enabled V3.2 surface |
<Tip>
Both bundled models currently advertise streaming usage compatibility in source.
V4 models support DeepSeek's `thinking` control. OpenClaw also replays
DeepSeek `reasoning_content` on follow-up turns so thinking sessions with tool
calls can continue.
</Tip>
## Config example
@@ -76,7 +80,7 @@ Both bundled models currently advertise streaming usage compatibility in source.
env: { DEEPSEEK_API_KEY: "sk-..." },
agents: {
defaults: {
model: { primary: "deepseek/deepseek-chat" },
model: { primary: "deepseek/deepseek-v4-flash" },
},
},
}

View File

@@ -7,7 +7,7 @@
"dependencies": {
"@anthropic-ai/sdk": "0.90.0",
"@aws/bedrock-token-generator": "^1.1.0",
"@mariozechner/pi-ai": "0.70.0"
"@mariozechner/pi-ai": "0.70.2"
},
"devDependencies": {
"@openclaw/plugin-sdk": "workspace:*"

View File

@@ -5,7 +5,7 @@
"description": "OpenClaw Anthropic provider plugin",
"type": "module",
"dependencies": {
"@mariozechner/pi-ai": "0.70.0"
"@mariozechner/pi-ai": "0.70.2"
},
"devDependencies": {
"@openclaw/plugin-sdk": "workspace:*"

View File

@@ -4,7 +4,7 @@
"description": "OpenClaw Codex harness and model provider plugin",
"type": "module",
"dependencies": {
"@mariozechner/pi-coding-agent": "0.70.0",
"@mariozechner/pi-coding-agent": "0.70.2",
"ajv": "^8.18.0",
"ws": "^8.20.0",
"zod": "^4.3.6"

View File

@@ -4,3 +4,4 @@ export {
DEEPSEEK_MODEL_CATALOG,
} from "./models.js";
export { buildDeepSeekProvider } from "./provider-catalog.js";
export { createDeepSeekV4ThinkingWrapper } from "./stream.js";

View File

@@ -1,8 +1,12 @@
import type { Context, Model } from "@mariozechner/pi-ai";
import { createAssistantMessageEventStream } from "@mariozechner/pi-ai";
import { describe, expect, it } from "vitest";
import { buildOpenAICompletionsParams } from "../../src/agents/openai-transport-stream.js";
import { resolveProviderPluginChoice } from "../../src/plugins/provider-auth-choice.runtime.js";
import { registerSingleProviderPlugin } from "../../test/helpers/plugins/plugin-registration.js";
import { runSingleProviderCatalog } from "../test-support/provider-model-test-helpers.js";
import deepseekPlugin from "./index.js";
import { createDeepSeekV4ThinkingWrapper } from "./stream.js";
describe("deepseek provider plugin", () => {
it("registers DeepSeek with api-key auth wizard metadata", async () => {
@@ -28,14 +32,181 @@ describe("deepseek provider plugin", () => {
expect(catalogProvider.api).toBe("openai-completions");
expect(catalogProvider.baseUrl).toBe("https://api.deepseek.com");
expect(catalogProvider.models?.map((model) => model.id)).toEqual([
"deepseek-v4-flash",
"deepseek-v4-pro",
"deepseek-chat",
"deepseek-reasoner",
]);
expect(catalogProvider.models?.find((model) => model.id === "deepseek-v4-flash")).toMatchObject(
{
reasoning: true,
contextWindow: 1_000_000,
maxTokens: 384_000,
compat: expect.objectContaining({
supportsReasoningEffort: true,
maxTokensField: "max_tokens",
}),
},
);
expect(
catalogProvider.models?.find((model) => model.id === "deepseek-reasoner")?.reasoning,
).toBe(true);
});
it("owns OpenAI-compatible replay policy", async () => {
const provider = await registerSingleProviderPlugin(deepseekPlugin);
expect(provider.buildReplayPolicy?.({ modelApi: "openai-completions" } as never)).toMatchObject(
{
sanitizeToolCallIds: true,
toolCallIdMode: "strict",
validateGeminiTurns: true,
validateAnthropicTurns: true,
},
);
});
it("maps thinking levels to DeepSeek V4 payload controls", async () => {
let capturedPayload: Record<string, unknown> | undefined;
const baseStreamFn = (
_model: Model<"openai-completions">,
_context: Context,
options?: { onPayload?: (payload: unknown) => unknown },
) => {
capturedPayload = {
model: "deepseek-v4-pro",
reasoning_effort: "high",
};
options?.onPayload?.(capturedPayload);
const stream = createAssistantMessageEventStream();
queueMicrotask(() => stream.end());
return stream;
};
const wrapThinkingOff = createDeepSeekV4ThinkingWrapper(baseStreamFn as never, "off");
expect(wrapThinkingOff).toBeDefined();
wrapThinkingOff?.(
{
provider: "deepseek",
id: "deepseek-v4-pro",
api: "openai-completions",
} as never,
{ messages: [] } as never,
{},
);
expect(capturedPayload).toMatchObject({ thinking: { type: "disabled" } });
expect(capturedPayload).not.toHaveProperty("reasoning_effort");
const wrapThinkingXhigh = createDeepSeekV4ThinkingWrapper(baseStreamFn as never, "xhigh");
expect(wrapThinkingXhigh).toBeDefined();
wrapThinkingXhigh?.(
{
provider: "deepseek",
id: "deepseek-v4-pro",
api: "openai-completions",
} as never,
{ messages: [] } as never,
{},
);
expect(capturedPayload).toMatchObject({
thinking: { type: "enabled" },
reasoning_effort: "max",
});
});
it("strips replayed reasoning_content when DeepSeek V4 thinking is disabled", async () => {
let capturedPayload: Record<string, unknown> | undefined;
const model = {
provider: "deepseek",
id: "deepseek-v4-flash",
name: "DeepSeek V4 Flash",
api: "openai-completions",
baseUrl: "https://api.deepseek.com",
reasoning: true,
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 1_000_000,
maxTokens: 384_000,
compat: {
supportsUsageInStreaming: true,
supportsReasoningEffort: true,
maxTokensField: "max_tokens",
},
} as Model<"openai-completions">;
const context = {
messages: [
{ role: "user", content: "hi", timestamp: 1 },
{
role: "assistant",
api: "openai-completions",
provider: "deepseek",
model: "deepseek-v4-flash",
content: [
{
type: "thinking",
thinking: "call reasoning",
thinkingSignature: "reasoning_content",
},
{ type: "toolCall", id: "call_1", name: "read", arguments: {} },
],
usage: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
totalTokens: 0,
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },
},
stopReason: "toolUse",
timestamp: 2,
},
{
role: "toolResult",
toolCallId: "call_1",
toolName: "read",
content: [{ type: "text", text: "ok" }],
isError: false,
timestamp: 3,
},
],
tools: [
{
name: "read",
description: "Read data",
parameters: { type: "object", properties: {}, required: [], additionalProperties: false },
},
],
} as Context;
const baseStreamFn = (
streamModel: Model<"openai-completions">,
streamContext: Context,
options?: { onPayload?: (payload: unknown, model: unknown) => unknown },
) => {
capturedPayload = buildOpenAICompletionsParams(streamModel, streamContext, {
reasoning: "high",
} as never);
options?.onPayload?.(capturedPayload, streamModel);
const stream = createAssistantMessageEventStream();
queueMicrotask(() => stream.end());
return stream;
};
const wrapThinkingNone = createDeepSeekV4ThinkingWrapper(
baseStreamFn as never,
"none" as never,
);
expect(wrapThinkingNone).toBeDefined();
wrapThinkingNone?.(model, context, {});
expect(capturedPayload).toMatchObject({ thinking: { type: "disabled" } });
expect(capturedPayload).not.toHaveProperty("reasoning_effort");
expect((capturedPayload?.messages as Array<Record<string, unknown>>)[1]).not.toHaveProperty(
"reasoning_content",
);
});
it("publishes configured DeepSeek models through plugin-owned catalog augmentation", async () => {
const provider = await registerSingleProviderPlugin(deepseekPlugin);

View File

@@ -1,7 +1,9 @@
import { readConfiguredProviderCatalogEntries } from "openclaw/plugin-sdk/provider-catalog-shared";
import { defineSingleProviderPluginEntry } from "openclaw/plugin-sdk/provider-entry";
import { buildProviderReplayFamilyHooks } from "openclaw/plugin-sdk/provider-model-shared";
import { applyDeepSeekConfig, DEEPSEEK_DEFAULT_MODEL_REF } from "./onboard.js";
import { buildDeepSeekProvider } from "./provider-catalog.js";
import { createDeepSeekV4ThinkingWrapper } from "./stream.js";
const PROVIDER_ID = "deepseek";
@@ -42,5 +44,7 @@ export default defineSingleProviderPluginEntry({
}),
matchesContextOverflowError: ({ errorMessage }) =>
/\bdeepseek\b.*(?:input.*too long|context.*exceed)/i.test(errorMessage),
...buildProviderReplayFamilyHooks({ family: "openai-compatible" }),
wrapStreamFn: (ctx) => createDeepSeekV4ThinkingWrapper(ctx.streamFn, ctx.thinkingLevel),
},
});

View File

@@ -2,7 +2,7 @@ import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-model-s
export const DEEPSEEK_BASE_URL = "https://api.deepseek.com";
// DeepSeek V3.2 API pricing (per 1M tokens)
// DeepSeek API pricing (per 1M tokens)
// https://api-docs.deepseek.com/quick_start/pricing
const DEEPSEEK_V3_2_COST = {
input: 0.28,
@@ -11,7 +11,49 @@ const DEEPSEEK_V3_2_COST = {
cacheWrite: 0,
};
const DEEPSEEK_V4_PRO_COST = {
input: 1.74,
output: 3.48,
cacheRead: 0.145,
cacheWrite: 0,
};
const DEEPSEEK_V4_FLASH_COST = {
input: 0.14,
output: 0.28,
cacheRead: 0.028,
cacheWrite: 0,
};
export const DEEPSEEK_MODEL_CATALOG: ModelDefinitionConfig[] = [
{
id: "deepseek-v4-flash",
name: "DeepSeek V4 Flash",
reasoning: true,
input: ["text"],
contextWindow: 1_000_000,
maxTokens: 384_000,
cost: DEEPSEEK_V4_FLASH_COST,
compat: {
supportsUsageInStreaming: true,
supportsReasoningEffort: true,
maxTokensField: "max_tokens",
},
},
{
id: "deepseek-v4-pro",
name: "DeepSeek V4 Pro",
reasoning: true,
input: ["text"],
contextWindow: 1_000_000,
maxTokens: 384_000,
cost: DEEPSEEK_V4_PRO_COST,
compat: {
supportsUsageInStreaming: true,
supportsReasoningEffort: true,
maxTokensField: "max_tokens",
},
},
{
id: "deepseek-chat",
name: "DeepSeek Chat",
@@ -20,7 +62,7 @@ export const DEEPSEEK_MODEL_CATALOG: ModelDefinitionConfig[] = [
contextWindow: 131072,
maxTokens: 8192,
cost: DEEPSEEK_V3_2_COST,
compat: { supportsUsageInStreaming: true },
compat: { supportsUsageInStreaming: true, maxTokensField: "max_tokens" },
},
{
id: "deepseek-reasoner",
@@ -30,7 +72,11 @@ export const DEEPSEEK_MODEL_CATALOG: ModelDefinitionConfig[] = [
contextWindow: 131072,
maxTokens: 65536,
cost: DEEPSEEK_V3_2_COST,
compat: { supportsUsageInStreaming: true },
compat: {
supportsUsageInStreaming: true,
supportsReasoningEffort: false,
maxTokensField: "max_tokens",
},
},
];

View File

@@ -5,7 +5,7 @@ import {
} from "openclaw/plugin-sdk/provider-onboard";
import { buildDeepSeekModelDefinition, DEEPSEEK_BASE_URL, DEEPSEEK_MODEL_CATALOG } from "./api.js";
export const DEEPSEEK_DEFAULT_MODEL_REF = "deepseek/deepseek-chat";
export const DEEPSEEK_DEFAULT_MODEL_REF = "deepseek/deepseek-v4-flash";
export function applyDeepSeekProviderConfig(cfg: OpenClawConfig): OpenClawConfig {
const models = { ...cfg.agents?.defaults?.models };

View File

@@ -0,0 +1,57 @@
import type { ProviderWrapStreamFnContext } from "openclaw/plugin-sdk/plugin-entry";
import { streamWithPayloadPatch } from "openclaw/plugin-sdk/provider-stream-shared";
type DeepSeekThinkingLevel = ProviderWrapStreamFnContext["thinkingLevel"];
function isDeepSeekV4ModelId(modelId: unknown): boolean {
  // DeepSeek V4 surfaces that understand the `thinking` payload control.
  const v4ModelIds = ["deepseek-v4-flash", "deepseek-v4-pro"];
  return typeof modelId === "string" && v4ModelIds.includes(modelId);
}
function isDisabledThinkingLevel(thinkingLevel: DeepSeekThinkingLevel): boolean {
  // Non-string levels (undefined, etc.) never count as an explicit "off".
  if (typeof thinkingLevel !== "string") {
    return false;
  }
  switch (thinkingLevel.toLowerCase()) {
    case "off":
    case "none":
      return true;
    default:
      return false;
  }
}
function resolveDeepSeekReasoningEffort(thinkingLevel: DeepSeekThinkingLevel): "high" | "max" {
  // Only the top thinking tiers map to DeepSeek's "max" effort; every other
  // enabled level (including unknown future values) defaults to "high".
  switch (thinkingLevel) {
    case "xhigh":
    case "max":
      return "max";
    default:
      return "high";
  }
}
function stripDeepSeekReasoningContent(payload: Record<string, unknown>): void {
  // Replayed assistant turns may carry a `reasoning_content` field; when
  // thinking is disabled we drop it from every message object in place.
  const messages = payload.messages;
  if (!Array.isArray(messages)) {
    return;
  }
  for (const entry of messages) {
    const isObject = entry !== null && typeof entry === "object";
    if (isObject) {
      delete (entry as Record<string, unknown>).reasoning_content;
    }
  }
}
/**
 * Wraps a provider stream function so DeepSeek V4 requests carry the right
 * `thinking` controls. Non-DeepSeek and non-V4 models pass through untouched.
 * Returns undefined when there is no base stream function to wrap.
 */
export function createDeepSeekV4ThinkingWrapper(
  baseStreamFn: ProviderWrapStreamFnContext["streamFn"],
  thinkingLevel: DeepSeekThinkingLevel,
): ProviderWrapStreamFnContext["streamFn"] {
  if (!baseStreamFn) {
    return undefined;
  }
  const inner = baseStreamFn;
  return (model, context, options) => {
    const isDeepSeekV4 = model.provider === "deepseek" && isDeepSeekV4ModelId(model.id);
    if (!isDeepSeekV4) {
      return inner(model, context, options);
    }
    return streamWithPayloadPatch(inner, model, context, options, (payload) => {
      if (!isDisabledThinkingLevel(thinkingLevel)) {
        // Thinking enabled: turn on DeepSeek thinking and map the level to effort.
        payload.thinking = { type: "enabled" };
        payload.reasoning_effort = resolveDeepSeekReasoningEffort(thinkingLevel);
        return;
      }
      // Thinking off/none: disable thinking, drop effort hints, and strip any
      // replayed reasoning fields the API would otherwise reject.
      payload.thinking = { type: "disabled" };
      delete payload.reasoning_effort;
      delete payload.reasoning;
      stripDeepSeekReasoningContent(payload);
    });
  };
}

View File

@@ -5,7 +5,7 @@
"description": "OpenClaw Fireworks provider plugin",
"type": "module",
"dependencies": {
"@mariozechner/pi-ai": "0.70.0"
"@mariozechner/pi-ai": "0.70.2"
},
"devDependencies": {
"@openclaw/plugin-sdk": "workspace:*"

View File

@@ -6,7 +6,7 @@
"type": "module",
"dependencies": {
"@clack/prompts": "^1.2.0",
"@mariozechner/pi-ai": "0.70.0"
"@mariozechner/pi-ai": "0.70.2"
},
"devDependencies": {
"@openclaw/plugin-sdk": "workspace:*"

View File

@@ -6,7 +6,7 @@
"type": "module",
"dependencies": {
"@google/genai": "^1.50.1",
"@mariozechner/pi-ai": "0.70.0"
"@mariozechner/pi-ai": "0.70.2"
},
"devDependencies": {
"@openclaw/plugin-sdk": "workspace:*"

View File

@@ -5,7 +5,7 @@
"description": "OpenClaw Kimi provider plugin",
"type": "module",
"dependencies": {
"@mariozechner/pi-ai": "0.70.0"
"@mariozechner/pi-ai": "0.70.2"
},
"devDependencies": {
"@openclaw/plugin-sdk": "workspace:*"

View File

@@ -5,7 +5,7 @@
"description": "OpenClaw LM Studio provider plugin",
"type": "module",
"dependencies": {
"@mariozechner/pi-ai": "0.70.0"
"@mariozechner/pi-ai": "0.70.2"
},
"openclaw": {
"extensions": [

View File

@@ -5,7 +5,7 @@
"description": "OpenClaw Ollama provider plugin",
"type": "module",
"dependencies": {
"@mariozechner/pi-ai": "0.70.0",
"@mariozechner/pi-ai": "0.70.2",
"typebox": "1.1.28"
},
"devDependencies": {

View File

@@ -5,7 +5,7 @@
"description": "OpenClaw OpenAI provider plugins",
"type": "module",
"dependencies": {
"@mariozechner/pi-ai": "0.70.0",
"@mariozechner/pi-ai": "0.70.2",
"ws": "^8.20.0"
},
"devDependencies": {

View File

@@ -5,7 +5,7 @@
"description": "OpenClaw xAI plugin",
"type": "module",
"dependencies": {
"@mariozechner/pi-ai": "0.70.0",
"@mariozechner/pi-ai": "0.70.2",
"typebox": "1.1.28",
"ws": "^8.20.0"
},

View File

@@ -1582,10 +1582,10 @@
"@anthropic-ai/vertex-sdk": "^0.16.0",
"@clack/prompts": "^1.2.0",
"@lydell/node-pty": "1.2.0-beta.12",
"@mariozechner/pi-agent-core": "0.70.0",
"@mariozechner/pi-ai": "0.70.0",
"@mariozechner/pi-coding-agent": "0.70.0",
"@mariozechner/pi-tui": "0.70.0",
"@mariozechner/pi-agent-core": "0.70.2",
"@mariozechner/pi-ai": "0.70.2",
"@mariozechner/pi-coding-agent": "0.70.2",
"@mariozechner/pi-tui": "0.70.2",
"@modelcontextprotocol/sdk": "1.29.0",
"@mozilla/readability": "^0.6.0",
"@vincentkoc/qrcode-tui": "0.2.1",

190
pnpm-lock.yaml generated
View File

@@ -52,17 +52,17 @@ importers:
specifier: 1.2.0-beta.12
version: 1.2.0-beta.12
'@mariozechner/pi-agent-core':
specifier: 0.70.0
version: 0.70.0(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
specifier: 0.70.2
version: 0.70.2(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
'@mariozechner/pi-ai':
specifier: 0.70.0
version: 0.70.0(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
specifier: 0.70.2
version: 0.70.2(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
'@mariozechner/pi-coding-agent':
specifier: 0.70.0
version: 0.70.0(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
specifier: 0.70.2
version: 0.70.2(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
'@mariozechner/pi-tui':
specifier: 0.70.0
version: 0.70.0
specifier: 0.70.2
version: 0.70.2
'@modelcontextprotocol/sdk':
specifier: 1.29.0
version: 1.29.0(zod@4.3.6)
@@ -277,8 +277,8 @@ importers:
specifier: ^1.1.0
version: 1.1.0
'@mariozechner/pi-ai':
specifier: 0.70.0
version: 0.70.0(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
specifier: 0.70.2
version: 0.70.2(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
devDependencies:
'@openclaw/plugin-sdk':
specifier: workspace:*
@@ -287,8 +287,8 @@ importers:
extensions/anthropic:
dependencies:
'@mariozechner/pi-ai':
specifier: 0.70.0
version: 0.70.0(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
specifier: 0.70.2
version: 0.70.2(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
devDependencies:
'@openclaw/plugin-sdk':
specifier: workspace:*
@@ -384,8 +384,8 @@ importers:
extensions/codex:
dependencies:
'@mariozechner/pi-coding-agent':
specifier: 0.70.0
version: 0.70.0(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
specifier: 0.70.2
version: 0.70.2(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
ajv:
specifier: ^8.18.0
version: 8.18.0
@@ -578,8 +578,8 @@ importers:
extensions/fireworks:
dependencies:
'@mariozechner/pi-ai':
specifier: 0.70.0
version: 0.70.0(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
specifier: 0.70.2
version: 0.70.2(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
devDependencies:
'@openclaw/plugin-sdk':
specifier: workspace:*
@@ -591,8 +591,8 @@ importers:
specifier: ^1.2.0
version: 1.2.0
'@mariozechner/pi-ai':
specifier: 0.70.0
version: 0.70.0(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
specifier: 0.70.2
version: 0.70.2(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
devDependencies:
'@openclaw/plugin-sdk':
specifier: workspace:*
@@ -604,8 +604,8 @@ importers:
specifier: ^1.50.1
version: 1.50.1(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))
'@mariozechner/pi-ai':
specifier: 0.70.0
version: 0.70.0(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
specifier: 0.70.2
version: 0.70.2(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
devDependencies:
'@openclaw/plugin-sdk':
specifier: workspace:*
@@ -685,8 +685,8 @@ importers:
extensions/kimi-coding:
dependencies:
'@mariozechner/pi-ai':
specifier: 0.70.0
version: 0.70.0(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
specifier: 0.70.2
version: 0.70.2(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
devDependencies:
'@openclaw/plugin-sdk':
specifier: workspace:*
@@ -727,8 +727,8 @@ importers:
extensions/lmstudio:
dependencies:
'@mariozechner/pi-ai':
specifier: 0.70.0
version: 0.70.0(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
specifier: 0.70.2
version: 0.70.2(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
extensions/lobster:
dependencies:
@@ -954,8 +954,8 @@ importers:
extensions/ollama:
dependencies:
'@mariozechner/pi-ai':
specifier: 0.70.0
version: 0.70.0(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
specifier: 0.70.2
version: 0.70.2(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
typebox:
specifier: 1.1.28
version: 1.1.28
@@ -973,8 +973,8 @@ importers:
extensions/openai:
dependencies:
'@mariozechner/pi-ai':
specifier: 0.70.0
version: 0.70.0(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
specifier: 0.70.2
version: 0.70.2(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
ws:
specifier: ^8.20.0
version: 8.20.0
@@ -1380,8 +1380,8 @@ importers:
extensions/xai:
dependencies:
'@mariozechner/pi-ai':
specifier: 0.70.0
version: 0.70.0(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
specifier: 0.70.2
version: 0.70.2(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
typebox:
specifier: 1.1.28
version: 1.1.28
@@ -2693,94 +2693,94 @@ packages:
'@lydell/node-pty@1.2.0-beta.12':
resolution: {integrity: sha512-qIK890UwPupoj07osVvgOIa++1mxeHbcGry4PKRHhNVNs81V2SCG34eJr46GybiOmBtc8Sj5PB1/GGM5PL549g==}
'@mariozechner/clipboard-darwin-arm64@0.3.2':
resolution: {integrity: sha512-uBf6K7Je1ihsgvmWxA8UCGCeI+nbRVRXoarZdLjl6slz94Zs1tNKFZqx7aCI5O1i3e0B6ja82zZ06BWrl0MCVw==}
'@mariozechner/clipboard-darwin-arm64@0.3.3':
resolution: {integrity: sha512-+zhuZGXqVrdkbIRdnwiZNbTJ7V3elq/A+C5d5laJoyhJgWs41eO5NUMkBkj6f23F2L4PRXEhdn5/ktlPx+bG3Q==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [darwin]
'@mariozechner/clipboard-darwin-universal@0.3.2':
resolution: {integrity: sha512-mxSheKTW2U9LsBdXy0SdmdCAE5HqNS9QUmpNHLnfJ+SsbFKALjEZc5oRrVMXxGQSirDvYf5bjmRyT0QYYonnlg==}
'@mariozechner/clipboard-darwin-universal@0.3.3':
resolution: {integrity: sha512-x9aRfTyndVqpEQ44LNNCK/EXZd9y8rWkLQgNhmWpby9PXrjPhNxfjUc2Db4mt4nJjU/4zzO8F5v/XyzlUGSdhQ==}
engines: {node: '>= 10'}
os: [darwin]
'@mariozechner/clipboard-darwin-x64@0.3.2':
resolution: {integrity: sha512-U1BcVEoidvwIp95+HJswSW+xr28EQiHR7rZjH6pn8Sja5yO4Yoe3yCN0Zm8Lo72BbSOK/fTSq0je7CJpaPCspg==}
'@mariozechner/clipboard-darwin-x64@0.3.3':
resolution: {integrity: sha512-6ut/NawB0KiYPCwrirgNp6Br62LntL978q7G6d/Rs2pmPvQb53bP96eUMYl+Y3a7Qk13bGZ4w9rVPFxRE9m9ag==}
engines: {node: '>= 10'}
cpu: [x64]
os: [darwin]
'@mariozechner/clipboard-linux-arm64-gnu@0.3.2':
resolution: {integrity: sha512-BsinwG3yWTIjdgNCxsFlip7LkfwPk+ruw/aFCXHUg/fb5XC/Ksp+YMQ7u0LUtiKzIv/7LMXgZInJQH6gxbAaqQ==}
'@mariozechner/clipboard-linux-arm64-gnu@0.3.3':
resolution: {integrity: sha512-gf3dH4kBddU1AOyHVB53mjLUFfJAKlTmxTMw51jdeg7eE7IjfEBXVvM4bifMtBxbWkT0eA0FUZ1C0KQ6Z5l6pw==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [linux]
libc: [glibc]
'@mariozechner/clipboard-linux-arm64-musl@0.3.2':
resolution: {integrity: sha512-0/Gi5Xq2V6goXBop19ePoHvXsmJD9SzFlO3S+d6+T2b+BlPcpOu3Oa0wTjl+cZrLAAEzA86aPNBI+VVAFDFPKw==}
'@mariozechner/clipboard-linux-arm64-musl@0.3.3':
resolution: {integrity: sha512-o1paj2+zmAQ/LaPS85XJCxhNowNQpxYM2cGY6pWvB5Kqmz6hZjl6CzDg5tbf1hZkn/Em6jpOaE2UtMxKdELBDA==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [linux]
libc: [musl]
'@mariozechner/clipboard-linux-riscv64-gnu@0.3.2':
resolution: {integrity: sha512-2AFFiXB24qf0zOZsxI1GJGb9wQGlOJyN6UwoXqmKS3dpQi/l6ix30IzDDA4c4ZcCcx4D+9HLYXhC1w7Sov8pXA==}
'@mariozechner/clipboard-linux-riscv64-gnu@0.3.3':
resolution: {integrity: sha512-dkEhE4ekePJwMbBq9HP1//CFMNmDzA/iV9AXqBfvL5CWmmDIRXqh4A3YZt3tWO/HdMerX+xNCEiR7WiOsIG+UA==}
engines: {node: '>= 10'}
cpu: [riscv64]
os: [linux]
libc: [glibc]
'@mariozechner/clipboard-linux-x64-gnu@0.3.2':
resolution: {integrity: sha512-v6fVnsn7WMGg73Dab8QMwyFce7tzGfgEixKgzLP8f1GJqkJZi5zO4k4FOHzSgUufgLil63gnxvMpjWkgfeQN7A==}
'@mariozechner/clipboard-linux-x64-gnu@0.3.3':
resolution: {integrity: sha512-lT2yANtTLlEtFBIH3uGoRa/CQas/eBoLNi3qr9axQFoRgF4RGPSJ66yHOSnMECBneTIb1Iqv3UxokTfX27CdoQ==}
engines: {node: '>= 10'}
cpu: [x64]
os: [linux]
libc: [glibc]
'@mariozechner/clipboard-linux-x64-musl@0.3.2':
resolution: {integrity: sha512-xVUtnoMQ8v2JVyfJLKKXACA6avdnchdbBkTsZs8BgJQo29qwCp5NIHAUO8gbJ40iaEGToW5RlmVk2M9V0HsHEw==}
'@mariozechner/clipboard-linux-x64-musl@0.3.3':
resolution: {integrity: sha512-saq/MCB0QHK/7ZZLjAZ0QkbY944dyjOsur8gneGCfMitt+GOiE1CU4OUipHC4b6x8UDY9bRLsR4aBaxu22OFPA==}
engines: {node: '>= 10'}
cpu: [x64]
os: [linux]
libc: [musl]
'@mariozechner/clipboard-win32-arm64-msvc@0.3.2':
resolution: {integrity: sha512-AEgg95TNi8TGgak2wSXZkXKCvAUTjWoU1Pqb0ON7JHrX78p616XUFNTJohtIon3e0w6k0pYPZeCuqRCza/Tqeg==}
'@mariozechner/clipboard-win32-arm64-msvc@0.3.3':
resolution: {integrity: sha512-cGuvSj0/2X2w983yEcKw+i+r1EBej6ZZIN+fXG3eY2G/HaIQpbXpLvMxKyZ9LKtbZx+Z6q/gELEoSBMLML6BaQ==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [win32]
'@mariozechner/clipboard-win32-x64-msvc@0.3.2':
resolution: {integrity: sha512-tGRuYpZwDOD7HBrCpyRuhGnHHSCknELvqwKKUG4JSfSB7JIU7LKRh6zx6fMUOQd8uISK35TjFg5UcNih+vJhFA==}
'@mariozechner/clipboard-win32-x64-msvc@0.3.3':
resolution: {integrity: sha512-5hvaEq/bgYovTIGx43O/S7loIHYV3ue90WcV1dz0wdMXroVKZKeU/yfwM0PALQA1OcrEHiGXGySFReXr72lGtA==}
engines: {node: '>= 10'}
cpu: [x64]
os: [win32]
'@mariozechner/clipboard@0.3.2':
resolution: {integrity: sha512-IHQpksNjo7EAtGuHFU+tbWDp5LarH3HU/8WiB9O70ZEoBPHOg0/6afwSLK0QyNMMmx4Bpi/zl6+DcBXe95nWYA==}
'@mariozechner/clipboard@0.3.3':
resolution: {integrity: sha512-e7jASirzfm+ROiOGFh843+cFZTy3DfzP+jldCvh8RnEk0C3QihDTn7dd7Yh7KAJydwIJ18FJSZ2swHvCJhk18g==}
engines: {node: '>= 10'}
'@mariozechner/jiti@2.6.5':
resolution: {integrity: sha512-faGUlTcXka5l7rv0lP3K3vGW/ejRuOS24RR2aSFWREUQqzjgdsuWNo/IiPqL3kWRGt6Ahl2+qcDAwtdeWeuGUw==}
hasBin: true
'@mariozechner/pi-agent-core@0.70.0':
resolution: {integrity: sha512-ZwfM5QPvSwza/apZhPIXjrI/blJBFqbVpK30ma4zNwH8VAyseKlzGDExCx/k+81Xydg60sQuG2BQVkYGmofuSg==}
'@mariozechner/pi-agent-core@0.70.2':
resolution: {integrity: sha512-g1hIdKyDwmQOoBGO0R4OhpemKeMENeK0vE5FJtuQKqEcsdCAkVBgZAK6aZUARYZVxMA718JS6WPLFWoddzjD7g==}
engines: {node: '>=20.0.0'}
'@mariozechner/pi-ai@0.70.0':
resolution: {integrity: sha512-lVT9bb0eFkNr5YXvZ5r00TNA5r110fOO8uJV9VLCQ5GdtunWIjcptWitzIjjl2MF0/NDs7Kb2EwZctXQWWP7eA==}
'@mariozechner/pi-ai@0.70.2':
resolution: {integrity: sha512-+30LRPjXsXF+oI96DvGWMbdPGeqoLJvadh6UPev7wx2DzhC9FEqXkQcoMZ0usbCm7E9pl8ua8a9s/pQ5ikaUbg==}
engines: {node: '>=20.0.0'}
hasBin: true
'@mariozechner/pi-coding-agent@0.70.0':
resolution: {integrity: sha512-Sw5odG9BYIcRItb/o4Gmq0nSIgoWfx61Isjk3Gk4KqocxHZAOwZZYQ4mgb4GCsevqOMmAzX/H6PC52/TiN76fw==}
'@mariozechner/pi-coding-agent@0.70.2':
resolution: {integrity: sha512-asfNqV89HKAmKvJ1wENBY/UQMIf77kLtkzBrvXnMQV4YbH7D/6KT+VeVzPG6zm5PAZP2UtdLY9B9Cge7IxH37w==}
engines: {node: '>=20.6.0'}
hasBin: true
'@mariozechner/pi-tui@0.70.0':
resolution: {integrity: sha512-x/CwIMP8v9KNrmgEFA0+AWIwSWeNAitEI4eVQtQ6q2a0PpE+vx1+j2oc+iDPe7E1YqrMHXaNlHJVCaVAv/UYrg==}
'@mariozechner/pi-tui@0.70.2':
resolution: {integrity: sha512-PtKC0NepnrYcqMx6MXkWTrBzC9tI62KeC6w940oT46lCbfvgmfqXciR15+9BZpxxc1H4jd3CMrKsmOPVeUqZ0A==}
engines: {node: '>=20.0.0'}
'@matrix-org/matrix-sdk-crypto-nodejs@0.4.0':
@@ -8705,17 +8705,17 @@ snapshots:
dependencies:
'@aws-crypto/sha256-browser': 5.2.0
'@aws-crypto/sha256-js': 5.2.0
'@aws-sdk/core': 3.974.3
'@aws-sdk/core': 3.974.2
'@aws-sdk/middleware-host-header': 3.972.10
'@aws-sdk/middleware-logger': 3.972.10
'@aws-sdk/middleware-recursion-detection': 3.972.11
'@aws-sdk/middleware-user-agent': 3.972.33
'@aws-sdk/region-config-resolver': 3.972.13
'@aws-sdk/middleware-user-agent': 3.972.32
'@aws-sdk/region-config-resolver': 3.972.12
'@aws-sdk/signature-v4-multi-region': 3.996.19
'@aws-sdk/types': 3.973.8
'@aws-sdk/util-endpoints': 3.996.8
'@aws-sdk/util-endpoints': 3.996.7
'@aws-sdk/util-user-agent-browser': 3.972.10
'@aws-sdk/util-user-agent-node': 3.973.19
'@aws-sdk/util-user-agent-node': 3.973.18
'@smithy/config-resolver': 4.4.17
'@smithy/core': 3.23.16
'@smithy/fetch-http-handler': 5.3.17
@@ -9909,48 +9909,48 @@ snapshots:
'@lydell/node-pty-win32-arm64': 1.2.0-beta.12
'@lydell/node-pty-win32-x64': 1.2.0-beta.12
'@mariozechner/clipboard-darwin-arm64@0.3.2':
'@mariozechner/clipboard-darwin-arm64@0.3.3':
optional: true
'@mariozechner/clipboard-darwin-universal@0.3.2':
'@mariozechner/clipboard-darwin-universal@0.3.3':
optional: true
'@mariozechner/clipboard-darwin-x64@0.3.2':
'@mariozechner/clipboard-darwin-x64@0.3.3':
optional: true
'@mariozechner/clipboard-linux-arm64-gnu@0.3.2':
'@mariozechner/clipboard-linux-arm64-gnu@0.3.3':
optional: true
'@mariozechner/clipboard-linux-arm64-musl@0.3.2':
'@mariozechner/clipboard-linux-arm64-musl@0.3.3':
optional: true
'@mariozechner/clipboard-linux-riscv64-gnu@0.3.2':
'@mariozechner/clipboard-linux-riscv64-gnu@0.3.3':
optional: true
'@mariozechner/clipboard-linux-x64-gnu@0.3.2':
'@mariozechner/clipboard-linux-x64-gnu@0.3.3':
optional: true
'@mariozechner/clipboard-linux-x64-musl@0.3.2':
'@mariozechner/clipboard-linux-x64-musl@0.3.3':
optional: true
'@mariozechner/clipboard-win32-arm64-msvc@0.3.2':
'@mariozechner/clipboard-win32-arm64-msvc@0.3.3':
optional: true
'@mariozechner/clipboard-win32-x64-msvc@0.3.2':
'@mariozechner/clipboard-win32-x64-msvc@0.3.3':
optional: true
'@mariozechner/clipboard@0.3.2':
'@mariozechner/clipboard@0.3.3':
optionalDependencies:
'@mariozechner/clipboard-darwin-arm64': 0.3.2
'@mariozechner/clipboard-darwin-universal': 0.3.2
'@mariozechner/clipboard-darwin-x64': 0.3.2
'@mariozechner/clipboard-linux-arm64-gnu': 0.3.2
'@mariozechner/clipboard-linux-arm64-musl': 0.3.2
'@mariozechner/clipboard-linux-riscv64-gnu': 0.3.2
'@mariozechner/clipboard-linux-x64-gnu': 0.3.2
'@mariozechner/clipboard-linux-x64-musl': 0.3.2
'@mariozechner/clipboard-win32-arm64-msvc': 0.3.2
'@mariozechner/clipboard-win32-x64-msvc': 0.3.2
'@mariozechner/clipboard-darwin-arm64': 0.3.3
'@mariozechner/clipboard-darwin-universal': 0.3.3
'@mariozechner/clipboard-darwin-x64': 0.3.3
'@mariozechner/clipboard-linux-arm64-gnu': 0.3.3
'@mariozechner/clipboard-linux-arm64-musl': 0.3.3
'@mariozechner/clipboard-linux-riscv64-gnu': 0.3.3
'@mariozechner/clipboard-linux-x64-gnu': 0.3.3
'@mariozechner/clipboard-linux-x64-musl': 0.3.3
'@mariozechner/clipboard-win32-arm64-msvc': 0.3.3
'@mariozechner/clipboard-win32-x64-msvc': 0.3.3
optional: true
'@mariozechner/jiti@2.6.5':
@@ -9958,9 +9958,9 @@ snapshots:
std-env: 3.10.0
yoctocolors: 2.1.2
'@mariozechner/pi-agent-core@0.70.0(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)':
'@mariozechner/pi-agent-core@0.70.2(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)':
dependencies:
'@mariozechner/pi-ai': 0.70.0(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
'@mariozechner/pi-ai': 0.70.2(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
typebox: 1.1.28
transitivePeerDependencies:
- '@modelcontextprotocol/sdk'
@@ -9971,7 +9971,7 @@ snapshots:
- ws
- zod
'@mariozechner/pi-ai@0.70.0(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)':
'@mariozechner/pi-ai@0.70.2(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)':
dependencies:
'@anthropic-ai/sdk': 0.90.0(zod@4.3.6)
'@aws-sdk/client-bedrock-runtime': 3.1034.0
@@ -9993,12 +9993,12 @@ snapshots:
- ws
- zod
'@mariozechner/pi-coding-agent@0.70.0(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)':
'@mariozechner/pi-coding-agent@0.70.2(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)':
dependencies:
'@mariozechner/jiti': 2.6.5
'@mariozechner/pi-agent-core': 0.70.0(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
'@mariozechner/pi-ai': 0.70.0(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
'@mariozechner/pi-tui': 0.70.0
'@mariozechner/pi-agent-core': 0.70.2(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
'@mariozechner/pi-ai': 0.70.2(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
'@mariozechner/pi-tui': 0.70.2
'@silvia-odwyer/photon-node': 0.3.4
chalk: 5.6.2
cli-highlight: 2.1.11
@@ -10017,7 +10017,7 @@ snapshots:
uuid: 14.0.0
yaml: 2.8.3
optionalDependencies:
'@mariozechner/clipboard': 0.3.2
'@mariozechner/clipboard': 0.3.3
transitivePeerDependencies:
- '@modelcontextprotocol/sdk'
- aws-crt
@@ -10027,7 +10027,7 @@ snapshots:
- ws
- zod
'@mariozechner/pi-tui@0.70.0':
'@mariozechner/pi-tui@0.70.2':
dependencies:
'@types/mime-types': 2.1.4
chalk: 5.6.2

View File

@@ -16,7 +16,7 @@ export type OpenAICompletionsCompatDefaults = {
supportsReasoningEffort: boolean;
supportsUsageInStreaming: boolean;
maxTokensField: "max_completion_tokens" | "max_tokens";
thinkingFormat: "openai" | "openrouter" | "zai";
thinkingFormat: "openai" | "openrouter" | "deepseek" | "zai";
visibleReasoningDetailTypes: string[];
supportsStrictMode: boolean;
};
@@ -72,6 +72,9 @@ export function resolveOpenAICompletionsCompatDefaults(
const isZai =
endpointClass === "zai-native" ||
(isDefaultRoute && isDefaultRouteProvider(input.provider, "zai"));
const isDeepSeek =
endpointClass === "deepseek-native" ||
(isDefaultRoute && isDefaultRouteProvider(input.provider, "deepseek"));
const isNonStandard =
endpointClass === "cerebras-native" ||
endpointClass === "chutes-native" ||
@@ -105,7 +108,13 @@ export function resolveOpenAICompletionsCompatDefaults(
supportsKnownLocalStreamingUsage ||
(!isNonStandard && (!usesConfiguredNonOpenAIEndpoint || supportsNativeStreamingUsageCompat)),
maxTokensField: usesMaxTokens ? "max_tokens" : "max_completion_tokens",
thinkingFormat: isZai ? "zai" : isOpenRouterLike ? "openrouter" : "openai",
thinkingFormat: isDeepSeek
? "deepseek"
: isZai
? "zai"
: isOpenRouterLike
? "openrouter"
: "openai",
visibleReasoningDetailTypes: isOpenRouterLike ? ["response.output_text", "response.text"] : [],
supportsStrictMode: !isZai && !usesConfiguredNonOpenAIEndpoint,
};

View File

@@ -2900,6 +2900,10 @@ export const GENERATED_BASE_CONFIG_SCHEMA: BaseConfigSchemaResponse = {
type: "string",
const: "openrouter",
},
{
type: "string",
const: "deepseek",
},
{
type: "string",
const: "zai",

View File

@@ -202,6 +202,7 @@ export const ModelCompatSchema = z
.union([
z.literal("openai"),
z.literal("openrouter"),
z.literal("deepseek"),
z.literal("zai"),
z.literal("qwen"),
z.literal("qwen-chat-template"),

View File

@@ -42,7 +42,7 @@ export function normalizeThinkLevel(raw?: string | null): string | undefined {
if (collapsed === "xhigh" || collapsed === "extrahigh") {
return "xhigh";
}
if (key === "off") {
if (key === "off" || key === "none") {
return "off";
}
if (["on", "enable", "enabled"].includes(key)) {