fix(openai): share responses transport hooks

This commit is contained in:
Vincent Koc
2026-04-14 16:39:55 +01:00
parent 4e46488d1b
commit 36f4913e30
4 changed files with 44 additions and 25 deletions

View File

@@ -23,21 +23,15 @@ import { OPENAI_CODEX_DEFAULT_MODEL } from "./default-models.js";
import { resolveCodexAuthIdentity } from "./openai-codex-auth-identity.js";
import { buildOpenAICodexProvider } from "./openai-codex-catalog.js";
import { CODEX_CLI_PROFILE_ID, readOpenAICodexCliOAuthProfile } from "./openai-codex-cli-auth.js";
import { buildOpenAIReplayPolicy } from "./replay-policy.js";
import {
buildOpenAIResponsesProviderHooks,
buildOpenAISyntheticCatalogEntry,
cloneFirstTemplateModel,
defaultOpenAIResponsesExtraParams,
findCatalogTemplate,
isOpenAIApiBaseUrl,
isOpenAICodexBaseUrl,
matchesExactOrPrefix,
OPENAI_RESPONSES_STREAM_HOOKS,
} from "./shared.js";
import {
resolveOpenAITransportTurnState,
resolveOpenAIWebSocketSessionPolicy,
} from "./transport-policy.js";
// Identifier this plugin registers under; compared against normalizeProviderId(ctx.provider) below.
const PROVIDER_ID = "openai-codex";
// Codex traffic goes through the ChatGPT backend API, not api.openai.com.
const OPENAI_CODEX_BASE_URL = "https://chatgpt.com/backend-api";
@@ -334,11 +328,7 @@ export function buildOpenAICodexProviderPlugin(): ProviderPlugin {
const id = ctx.modelId.trim().toLowerCase();
return id === OPENAI_CODEX_GPT_54_MODEL_ID || id === OPENAI_CODEX_GPT_54_PRO_MODEL_ID;
},
buildReplayPolicy: buildOpenAIReplayPolicy,
prepareExtraParams: (ctx) => defaultOpenAIResponsesExtraParams(ctx.extraParams),
...OPENAI_RESPONSES_STREAM_HOOKS,
resolveTransportTurnState: (ctx) => resolveOpenAITransportTurnState(ctx),
resolveWebSocketSessionPolicy: (ctx) => resolveOpenAIWebSocketSessionPolicy(ctx),
...buildOpenAIResponsesProviderHooks(),
resolveReasoningOutputMode: () => "native",
normalizeResolvedModel: (ctx) => {
if (normalizeProviderId(ctx.provider) !== PROVIDER_ID) {

View File

@@ -403,6 +403,11 @@ describe("buildOpenAIProvider", () => {
const codexProvider = buildOpenAICodexProviderPlugin();
expect(provider.wrapStreamFn).toBe(codexProvider.wrapStreamFn);
expect(provider.buildReplayPolicy).toBe(codexProvider.buildReplayPolicy);
expect(provider.resolveTransportTurnState).toBe(codexProvider.resolveTransportTurnState);
expect(provider.resolveWebSocketSessionPolicy).toBe(
codexProvider.resolveWebSocketSessionPolicy,
);
});
it("owns Azure OpenAI reasoning compatibility without forcing OpenAI transport defaults", () => {

View File

@@ -11,20 +11,14 @@ import {
} from "openclaw/plugin-sdk/provider-model-shared";
import { normalizeLowercaseStringOrEmpty } from "openclaw/plugin-sdk/text-runtime";
import { applyOpenAIConfig, OPENAI_DEFAULT_MODEL } from "./default-models.js";
import { buildOpenAIReplayPolicy } from "./replay-policy.js";
import {
buildOpenAIResponsesProviderHooks,
buildOpenAISyntheticCatalogEntry,
cloneFirstTemplateModel,
defaultOpenAIResponsesExtraParams,
findCatalogTemplate,
isOpenAIApiBaseUrl,
matchesExactOrPrefix,
OPENAI_RESPONSES_STREAM_HOOKS,
} from "./shared.js";
import {
resolveOpenAITransportTurnState,
resolveOpenAIWebSocketSessionPolicy,
} from "./transport-policy.js";
// Identifier the plain OpenAI provider plugin registers under.
const PROVIDER_ID = "openai";
const OPENAI_GPT_54_MODEL_ID = "gpt-5.4";
@@ -220,14 +214,9 @@ export function buildOpenAIProvider(): ProviderPlugin {
shouldUseOpenAIResponsesTransport({ provider, api, baseUrl })
? { api: "openai-responses", baseUrl }
: undefined,
buildReplayPolicy: buildOpenAIReplayPolicy,
prepareExtraParams: (ctx) =>
defaultOpenAIResponsesExtraParams(ctx.extraParams, { openaiWsWarmup: true }),
...OPENAI_RESPONSES_STREAM_HOOKS,
...buildOpenAIResponsesProviderHooks({ openaiWsWarmup: true }),
matchesContextOverflowError: ({ errorMessage }) =>
/content_filter.*(?:prompt|input).*(?:too long|exceed)/i.test(errorMessage),
resolveTransportTurnState: (ctx) => resolveOpenAITransportTurnState(ctx),
resolveWebSocketSessionPolicy: (ctx) => resolveOpenAIWebSocketSessionPolicy(ctx),
resolveReasoningOutputMode: () => "native",
supportsXHighThinking: ({ modelId }) => matchesExactOrPrefix(modelId, OPENAI_XHIGH_MODEL_IDS),
isModernModelRef: ({ modelId }) => matchesExactOrPrefix(modelId, OPENAI_MODERN_MODEL_IDS),

View File

@@ -3,9 +3,15 @@ import { findCatalogTemplate } from "openclaw/plugin-sdk/provider-catalog-shared
import {
cloneFirstTemplateModel,
matchesExactOrPrefix,
type ProviderPlugin,
} from "openclaw/plugin-sdk/provider-model-shared";
import { buildProviderStreamFamilyHooks } from "openclaw/plugin-sdk/provider-stream-family";
import { normalizeOptionalString } from "openclaw/plugin-sdk/text-runtime";
import { buildOpenAIReplayPolicy } from "./replay-policy.js";
import {
resolveOpenAITransportTurnState,
resolveOpenAIWebSocketSessionPolicy,
} from "./transport-policy.js";
type SyntheticOpenAIModelCatalogCost = {
input: number;
@@ -78,6 +84,35 @@ export function defaultOpenAIResponsesExtraParams(
};
}
/**
 * The subset of {@link ProviderPlugin} hooks that every OpenAI
 * Responses-based provider (openai, openai-codex) shares. Built once per
 * plugin via `buildOpenAIResponsesProviderHooks` so the implementations
 * stay in sync across providers.
 */
type OpenAIResponsesProviderHooks = Pick<
	ProviderPlugin,
	| "buildReplayPolicy"
	| "prepareExtraParams"
	| "wrapStreamFn"
	| "resolveTransportTurnState"
	| "resolveWebSocketSessionPolicy"
>;
// Hoisted to module scope so every call to buildOpenAIResponsesProviderHooks
// returns the SAME function instance — callers compare these hooks with
// reference equality (see the `toBe` assertions in the provider tests).
const resolveOpenAIResponsesTransportTurnState: NonNullable<
	OpenAIResponsesProviderHooks["resolveTransportTurnState"]
> = (ctx) => resolveOpenAITransportTurnState(ctx);
// Module-scoped for the same reason as resolveOpenAIResponsesTransportTurnState:
// a stable identity shared by every provider built from these hooks.
const resolveOpenAIResponsesWebSocketSessionPolicy: NonNullable<
	OpenAIResponsesProviderHooks["resolveWebSocketSessionPolicy"]
> = (ctx) => resolveOpenAIWebSocketSessionPolicy(ctx);
/**
 * Builds the transport/stream hooks shared by OpenAI Responses-based
 * provider plugins.
 *
 * All hooks except `prepareExtraParams` are module-level singletons, so
 * repeated calls hand out identical function references — providers built
 * from this helper therefore share hook identity (reference equality).
 * `prepareExtraParams` closes over `options` and is created per call.
 *
 * @param options - `openaiWsWarmup` is forwarded to
 *   {@link defaultOpenAIResponsesExtraParams}.
 * @returns The shared Responses hooks, ready to spread into a provider.
 */
export function buildOpenAIResponsesProviderHooks(options?: {
	openaiWsWarmup?: boolean;
}): OpenAIResponsesProviderHooks {
	// Only this hook depends on `options`; everything else is a shared singleton.
	const prepareExtraParams: OpenAIResponsesProviderHooks["prepareExtraParams"] = (ctx) =>
		defaultOpenAIResponsesExtraParams(ctx.extraParams, options);
	return {
		buildReplayPolicy: buildOpenAIReplayPolicy,
		prepareExtraParams,
		// Keep the spread between the explicit keys above and below so any
		// overlapping keys override exactly as in the original ordering.
		...OPENAI_RESPONSES_STREAM_HOOKS,
		resolveTransportTurnState: resolveOpenAIResponsesTransportTurnState,
		resolveWebSocketSessionPolicy: resolveOpenAIResponsesWebSocketSessionPolicy,
	};
}
export function buildOpenAISyntheticCatalogEntry(
template: ReturnType<typeof findCatalogTemplate>,
entry: {