Add GPT-5.5 Codex model support

This commit is contained in:
Ben Vargas
2026-04-23 13:28:03 -06:00
parent 7d5f6d9382
commit 736018a0b0
2 changed files with 96 additions and 8 deletions

View File

@@ -0,0 +1,88 @@
package registry
import "testing"
// TestCodexStaticModelsIncludeGPT55 verifies that every codex tier list and
// the static lookup table expose the gpt-5.5 model with the expected metadata.
func TestCodexStaticModelsIncludeGPT55(t *testing.T) {
	byTier := map[string][]*ModelInfo{
		"free": GetCodexFreeModels(),
		"team": GetCodexTeamModels(),
		"plus": GetCodexPlusModels(),
		"pro":  GetCodexProModels(),
	}
	for tier, models := range byTier {
		t.Run(tier, func(t *testing.T) {
			found := findModelInfo(models, "gpt-5.5")
			if found == nil {
				t.Fatalf("expected codex %s tier to include gpt-5.5", tier)
			}
			assertGPT55ModelInfo(t, tier, found)
		})
	}
	// The flat lookup path must agree with the per-tier lists.
	looked := LookupStaticModelInfo("gpt-5.5")
	if looked == nil {
		t.Fatal("expected LookupStaticModelInfo to find gpt-5.5")
	}
	assertGPT55ModelInfo(t, "lookup", looked)
}
// findModelInfo returns the first non-nil entry in models whose ID equals id,
// or nil when no such model exists.
func findModelInfo(models []*ModelInfo, id string) *ModelInfo {
	for i := range models {
		if m := models[i]; m != nil && m.ID == id {
			return m
		}
	}
	return nil
}
// assertGPT55ModelInfo fails the test unless model carries the exact static
// metadata expected for gpt-5.5; source labels the tier (or "lookup") in
// failure messages.
func assertGPT55ModelInfo(t *testing.T, source string, model *ModelInfo) {
	t.Helper()
	// requireField reports a fatal mismatch for a string-valued field, using
	// the same "<source> <field> mismatch: got <value>" message shape as before.
	requireField := func(field, got, want string) {
		t.Helper()
		if got != want {
			t.Fatalf("%s %s mismatch: got %q", source, field, got)
		}
	}
	requireField("id", model.ID, "gpt-5.5")
	requireField("object", model.Object, "model")
	if model.Created != 1776902400 {
		t.Fatalf("%s created timestamp mismatch: got %d", source, model.Created)
	}
	requireField("owned_by", model.OwnedBy, "openai")
	requireField("type", model.Type, "openai")
	requireField("display name", model.DisplayName, "GPT 5.5")
	requireField("version", model.Version, "gpt-5.5")
	requireField("description", model.Description, "Frontier model for complex coding, research, and real-world work.")
	if model.ContextLength != 272000 {
		t.Fatalf("%s context length mismatch: got %d", source, model.ContextLength)
	}
	if model.MaxCompletionTokens != 128000 {
		t.Fatalf("%s max completion tokens mismatch: got %d", source, model.MaxCompletionTokens)
	}
	if len(model.SupportedParameters) != 1 || model.SupportedParameters[0] != "tools" {
		t.Fatalf("%s supported parameters mismatch: got %v", source, model.SupportedParameters)
	}
	if model.Thinking == nil {
		t.Fatalf("%s missing thinking support", source)
	}
	wantLevels := []string{"low", "medium", "high", "xhigh"}
	if len(model.Thinking.Levels) != len(wantLevels) {
		t.Fatalf("%s thinking level count mismatch: got %d, want %d", source, len(model.Thinking.Levels), len(wantLevels))
	}
	// Length equality above guarantees the index is in range for both slices.
	for i := range wantLevels {
		if model.Thinking.Levels[i] != wantLevels[i] {
			t.Fatalf("%s thinking level %d mismatch: got %q, want %q", source, i, model.Thinking.Levels[i], wantLevels[i])
		}
	}
}

View File

@@ -1301,8 +1301,8 @@
"type": "openai",
"display_name": "GPT 5.5",
"version": "gpt-5.5",
-"description": "Stable version of GPT 5.5",
-"context_length": 1050000,
+"description": "Frontier model for complex coding, research, and real-world work.",
+"context_length": 272000,
"max_completion_tokens": 128000,
"supported_parameters": [
"tools"
@@ -1419,8 +1419,8 @@
"type": "openai",
"display_name": "GPT 5.5",
"version": "gpt-5.5",
-"description": "Stable version of GPT 5.5",
-"context_length": 1050000,
+"description": "Frontier model for complex coding, research, and real-world work.",
+"context_length": 272000,
"max_completion_tokens": 128000,
"supported_parameters": [
"tools"
@@ -1560,8 +1560,8 @@
"type": "openai",
"display_name": "GPT 5.5",
"version": "gpt-5.5",
-"description": "Stable version of GPT 5.5",
-"context_length": 1050000,
+"description": "Frontier model for complex coding, research, and real-world work.",
+"context_length": 272000,
"max_completion_tokens": 128000,
"supported_parameters": [
"tools"
@@ -1701,8 +1701,8 @@
"type": "openai",
"display_name": "GPT 5.5",
"version": "gpt-5.5",
-"description": "Stable version of GPT 5.5",
-"context_length": 1050000,
+"description": "Frontier model for complex coding, research, and real-world work.",
+"context_length": 272000,
"max_completion_tokens": 128000,
"supported_parameters": [
"tools"