fix: address review feedback for Vercel AI Gateway PR

- Add --vercel-ai-gateway CLI option to install command
- Fix librarian/explore priority: native providers (ZAI, Copilot) now take precedence over Vercel gateway
- Add hasVercelAiGateway to installer no-provider warning conditions
- Add hasVercelAiGateway: false to all test file InstallConfig fixtures
- Fix test expectations for model ID format (claude-opus-4.6 vs claude-opus-4-6)
This commit is contained in:
YeonGyu-Kim
2026-04-13 15:31:28 +09:00
parent 569addd3b0
commit e3b5c2be2e
10 changed files with 30 additions and 11 deletions

View File

@@ -22,6 +22,7 @@ describe("runCliInstaller telemetry isolation", () => {
hasZaiCodingPlan: false,
hasKimiForCoding: false,
hasOpencodeGo: false,
hasVercelAiGateway: false,
}),
spyOn(configManager, "isOpenCodeInstalled").mockResolvedValue(true),
spyOn(configManager, "getOpenCodeVersion").mockResolvedValue("1.4.0"),

View File

@@ -37,6 +37,7 @@ describe("runCliInstaller", () => {
hasZaiCodingPlan: false,
hasKimiForCoding: false,
hasOpencodeGo: false,
hasVercelAiGateway: false,
}),
spyOn(configManager, "isOpenCodeInstalled").mockResolvedValue(true),
spyOn(configManager, "getOpenCodeVersion").mockResolvedValue("1.3.9"),
@@ -83,6 +84,7 @@ describe("runCliInstaller", () => {
hasZaiCodingPlan: false,
hasKimiForCoding: false,
hasOpencodeGo: false,
hasVercelAiGateway: false,
}),
spyOn(configManager, "isOpenCodeInstalled").mockResolvedValue(true),
spyOn(configManager, "getOpenCodeVersion").mockResolvedValue("1.4.0"),

View File

@@ -138,7 +138,8 @@ export async function runCliInstaller(args: InstallArgs, version: string): Promi
!config.hasOpenAI &&
!config.hasGemini &&
!config.hasCopilot &&
!config.hasOpencodeZen
!config.hasOpencodeZen &&
!config.hasVercelAiGateway
) {
printWarning("No model providers configured. Using opencode/big-pickle as fallback.")
}

View File

@@ -33,6 +33,7 @@ program
.option("--zai-coding-plan <value>", "Z.ai Coding Plan subscription: no, yes (default: no)")
.option("--kimi-for-coding <value>", "Kimi For Coding subscription: no, yes (default: no)")
.option("--opencode-go <value>", "OpenCode Go subscription: no, yes (default: no)")
.option("--vercel-ai-gateway <value>", "Vercel AI Gateway: no, yes (default: no)")
.option("--skip-auth", "Skip authentication setup hints")
.addHelpText("after", `
Examples:
@@ -40,14 +41,15 @@ Examples:
$ bunx oh-my-opencode install --no-tui --claude=max20 --openai=yes --gemini=yes --copilot=no
$ bunx oh-my-opencode install --no-tui --claude=no --gemini=no --copilot=yes --opencode-zen=yes
Model Providers (Priority: Native > Copilot > OpenCode Zen > Z.ai > Kimi):
Model Providers (Priority: Native > Copilot > OpenCode Zen > Z.ai > Kimi > Vercel):
Claude Native anthropic/ models (Opus, Sonnet, Haiku)
OpenAI Native openai/ models (GPT-5.4 for Oracle)
Gemini Native google/ models (Gemini 3.1 Pro, Flash)
Copilot github-copilot/ models (fallback)
OpenCode Zen opencode/ models (opencode/claude-opus-4-6, etc.)
Z.ai zai-coding-plan/glm-5 (visual-engineering fallback)
Z.ai zai-coding-plan/glm-5 (visual-engineering fallback)
Kimi kimi-for-coding/k2p5 (Sisyphus/Prometheus fallback)
Vercel vercel/ models (universal proxy, always last fallback)
`)
.action(async (options) => {
const args: InstallArgs = {
@@ -60,6 +62,7 @@ Model Providers (Priority: Native > Copilot > OpenCode Zen > Z.ai > Kimi):
zaiCodingPlan: options.zaiCodingPlan,
kimiForCoding: options.kimiForCoding,
opencodeGo: options.opencodeGo,
vercelAiGateway: options.vercelAiGateway,
skipAuth: options.skipAuth ?? false,
}
const exitCode = await install(args)

View File

@@ -18,6 +18,7 @@ describe("generateOmoConfig - model fallback system", () => {
hasZaiCodingPlan: false,
hasKimiForCoding: false,
hasOpencodeGo: false,
hasVercelAiGateway: false,
}
//#when
@@ -42,6 +43,7 @@ describe("generateOmoConfig - model fallback system", () => {
hasZaiCodingPlan: false,
hasKimiForCoding: false,
hasOpencodeGo: false,
hasVercelAiGateway: false,
}
//#when
@@ -64,6 +66,7 @@ describe("generateOmoConfig - model fallback system", () => {
hasZaiCodingPlan: true,
hasKimiForCoding: false,
hasOpencodeGo: false,
hasVercelAiGateway: false,
}
//#when
@@ -71,7 +74,7 @@ describe("generateOmoConfig - model fallback system", () => {
//#then
expect((result.agents as Record<string, { model: string }>).librarian.model).toBe("zai-coding-plan/glm-4.7")
expect((result.agents as Record<string, { model: string }>).sisyphus.model).toBe("anthropic/claude-opus-4-6")
expect((result.agents as Record<string, { model: string }>).sisyphus.model).toBe("anthropic/claude-opus-4.6")
})
test("uses native OpenAI models when only ChatGPT available", () => {
@@ -86,6 +89,7 @@ describe("generateOmoConfig - model fallback system", () => {
hasZaiCodingPlan: false,
hasKimiForCoding: false,
hasOpencodeGo: false,
hasVercelAiGateway: false,
}
//#when
@@ -110,6 +114,7 @@ describe("generateOmoConfig - model fallback system", () => {
hasZaiCodingPlan: false,
hasKimiForCoding: false,
hasOpencodeGo: false,
hasVercelAiGateway: false,
}
//#when
@@ -126,7 +131,7 @@ describe("generateOmoConfig - model fallback system", () => {
}>
//#then
expect(agents.sisyphus.model).toBe("anthropic/claude-opus-4-6")
expect(agents.sisyphus.model).toBe("anthropic/claude-opus-4.6")
expect(agents.sisyphus.fallback_models).toEqual([
{
model: "openai/gpt-5.4",
@@ -136,7 +141,7 @@ describe("generateOmoConfig - model fallback system", () => {
expect(categories.deep.model).toBe("openai/gpt-5.4")
expect(categories.deep.fallback_models).toEqual([
{
model: "anthropic/claude-opus-4-6",
model: "anthropic/claude-opus-4.6",
variant: "max",
},
])
@@ -154,6 +159,7 @@ describe("generateOmoConfig - model fallback system", () => {
hasZaiCodingPlan: false,
hasKimiForCoding: false,
hasOpencodeGo: false,
hasVercelAiGateway: false,
}
//#when
@@ -175,6 +181,7 @@ describe("generateOmoConfig - model fallback system", () => {
hasZaiCodingPlan: false,
hasKimiForCoding: false,
hasOpencodeGo: false,
hasVercelAiGateway: false,
}
//#when

View File

@@ -20,6 +20,7 @@ const installConfig: InstallConfig = {
hasZaiCodingPlan: false,
hasKimiForCoding: false,
hasOpencodeGo: false,
hasVercelAiGateway: false,
}
function getRecord(value: unknown): Record<string, unknown> {

View File

@@ -129,10 +129,10 @@ export function generateModelConfig(config: InstallConfig): GeneratedOmoConfig {
let agentConfig: AgentConfig | undefined
if (avail.opencodeGo) {
agentConfig = { model: "opencode-go/minimax-m2.7" }
} else if (avail.vercelAiGateway) {
agentConfig = { model: "vercel/minimax/minimax-m2.7" }
} else if (avail.zai) {
agentConfig = { model: ZAI_MODEL }
} else if (avail.vercelAiGateway) {
agentConfig = { model: "vercel/minimax/minimax-m2.7" }
}
if (agentConfig) {
agents[role] = attachAllFallbackModels(agentConfig, req.fallbackChain, avail)
@@ -148,10 +148,10 @@ export function generateModelConfig(config: InstallConfig): GeneratedOmoConfig {
agentConfig = { model: "opencode/claude-haiku-4-5" }
} else if (avail.opencodeGo) {
agentConfig = { model: "opencode-go/minimax-m2.7" }
} else if (avail.vercelAiGateway) {
agentConfig = { model: "vercel/minimax/minimax-m2.7-highspeed" }
} else if (avail.copilot) {
agentConfig = { model: "github-copilot/gpt-5-mini" }
} else if (avail.vercelAiGateway) {
agentConfig = { model: "vercel/minimax/minimax-m2.7-highspeed" }
} else {
agentConfig = { model: "opencode/gpt-5-nano" }
}

View File

@@ -14,6 +14,7 @@ function createConfig(overrides: Partial<InstallConfig> = {}): InstallConfig {
hasZaiCodingPlan: false,
hasKimiForCoding: false,
hasOpencodeGo: false,
hasVercelAiGateway: false,
...overrides,
}
}

View File

@@ -44,6 +44,7 @@ describe("runTuiInstaller", () => {
hasZaiCodingPlan: false,
hasKimiForCoding: false,
hasOpencodeGo: false,
hasVercelAiGateway: false,
}),
spyOn(configManager, "isOpenCodeInstalled").mockResolvedValue(true),
spyOn(configManager, "getOpenCodeVersion").mockResolvedValue("1.3.9"),
@@ -92,6 +93,7 @@ describe("runTuiInstaller", () => {
hasZaiCodingPlan: false,
hasKimiForCoding: false,
hasOpencodeGo: false,
hasVercelAiGateway: false,
}),
spyOn(configManager, "isOpenCodeInstalled").mockResolvedValue(true),
spyOn(configManager, "getOpenCodeVersion").mockResolvedValue("1.4.0"),
@@ -105,6 +107,7 @@ describe("runTuiInstaller", () => {
hasZaiCodingPlan: false,
hasKimiForCoding: false,
hasOpencodeGo: false,
hasVercelAiGateway: false,
}),
spyOn(configManager, "addPluginToOpenCodeConfig").mockResolvedValue({
success: true,

View File

@@ -77,7 +77,7 @@ export async function runTuiInstaller(args: InstallArgs, version: string): Promi
)
}
if (!config.hasClaude && !config.hasOpenAI && !config.hasGemini && !config.hasCopilot && !config.hasOpencodeZen) {
if (!config.hasClaude && !config.hasOpenAI && !config.hasGemini && !config.hasCopilot && !config.hasOpencodeZen && !config.hasVercelAiGateway) {
p.log.warn("No model providers configured. Using opencode/big-pickle as fallback.")
}