Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion cmd/testai/main-testai.go
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ var testSchemaJSON string

const (
DefaultAnthropicModel = "claude-sonnet-4-5"
DefaultOpenAIModel = "gpt-5"
DefaultOpenAIModel = "gpt-5.1"
)

// TestResponseWriter implements http.ResponseWriter and additional interfaces for testing
Expand Down
4 changes: 2 additions & 2 deletions frontend/app/aipanel/aipanel-contextmenu.ts
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ export async function handleWaveAIContextMenu(e: React.MouseEvent, showCopy: boo
},
},
{
label: hasPremium ? "Balanced (gpt-5, low thinking)" : "Balanced (premium)",
label: hasPremium ? "Balanced (gpt-5.1, low thinking)" : "Balanced (premium)",
type: "checkbox",
checked: currentThinkingMode === "balanced",
enabled: hasPremium,
Expand All @@ -71,7 +71,7 @@ export async function handleWaveAIContextMenu(e: React.MouseEvent, showCopy: boo
},
},
{
label: hasPremium ? "Deep (gpt-5, full thinking)" : "Deep (premium)",
label: hasPremium ? "Deep (gpt-5.1, full thinking)" : "Deep (premium)",
type: "checkbox",
checked: currentThinkingMode === "deep",
enabled: hasPremium,
Expand Down
4 changes: 2 additions & 2 deletions frontend/app/aipanel/thinkingmode.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -25,13 +25,13 @@ const ThinkingModeData: Record<ThinkingMode, ThinkingModeMetadata> = {
balanced: {
icon: "fa-sparkles",
name: "Balanced",
desc: "Good mix of speed and accuracy\n(gpt-5 with minimal thinking)",
desc: "Good mix of speed and accuracy\n(gpt-5.1 with minimal thinking)",
premium: true,
},
deep: {
icon: "fa-lightbulb",
name: "Deep",
desc: "Slower but most capable\n(gpt-5 with full reasoning)",
desc: "Slower but most capable\n(gpt-5.1 with full reasoning)",
premium: true,
},
};
Expand Down
2 changes: 1 addition & 1 deletion frontend/app/aipanel/waveai-model.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ export class WaveAIModel {

this.modelAtom = jotai.atom((get) => {
const modelMetaAtom = getOrefMetaKeyAtom(this.orefContext, "waveai:model");
return get(modelMetaAtom) ?? "gpt-5";
return get(modelMetaAtom) ?? "gpt-5.1";
});

this.widgetAccessAtom = jotai.atom((get) => {
Expand Down
2 changes: 1 addition & 1 deletion pkg/aiusechat/openai/openai-convertmessage.go
Original file line number Diff line number Diff line change
Expand Up @@ -278,7 +278,7 @@ func buildOpenAIHTTPRequest(ctx context.Context, inputs []any, chatOpts uctypes.
reqBody.Reasoning = &ReasoningType{
Effort: opts.ThinkingLevel, // low, medium, high map directly
}
if opts.Model == "gpt-5" {
if opts.Model == "gpt-5" || opts.Model == "gpt-5.1" {
reqBody.Reasoning.Summary = "auto"
}
}
Expand Down
5 changes: 3 additions & 2 deletions pkg/aiusechat/uctypes/usechat-types.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ import (
const DefaultAIEndpoint = "https://cfapi.waveterm.dev/api/waveai"
const DefaultAnthropicModel = "claude-sonnet-4-5"
const DefaultOpenAIModel = "gpt-5-mini"
const PremiumOpenAIModel = "gpt-5"
const PremiumOpenAIModel = "gpt-5.1"

type UseChatRequest struct {
Messages []UIMessage `json:"messages"`
Expand Down Expand Up @@ -234,7 +234,7 @@ func (opts AIOptsType) IsWaveProxy() bool {
}

func (opts AIOptsType) IsPremiumModel() bool {
return opts.Model == "gpt-5" || strings.Contains(opts.Model, "claude-sonnet")
return opts.Model == "gpt-5" || opts.Model == "gpt-5.1" || strings.Contains(opts.Model, "claude-sonnet")
}

type AIChat struct {
Expand Down Expand Up @@ -552,6 +552,7 @@ func AreModelsCompatible(apiType, model1, model2 string) bool {

if apiType == "openai" {
gpt5Models := map[string]bool{
"gpt-5.1": true,
"gpt-5": true,
"gpt-5-mini": true,
"gpt-5-nano": true,
Expand Down
2 changes: 1 addition & 1 deletion pkg/aiusechat/usechat.go
Original file line number Diff line number Diff line change
Expand Up @@ -475,7 +475,7 @@ func RunAIChat(ctx context.Context, sseHandler *sse.SSEHandlerCh, chatOpts uctyp
}
}
if stopReason != nil && stopReason.Kind == uctypes.StopKindPremiumRateLimit && chatOpts.Config.APIType == APIType_OpenAI && chatOpts.Config.Model == uctypes.PremiumOpenAIModel {
log.Printf("Premium rate limit hit with gpt-5, switching to gpt-5-mini\n")
log.Printf("Premium rate limit hit with %s, switching to %s\n", uctypes.PremiumOpenAIModel, uctypes.DefaultOpenAIModel)
cont = &uctypes.WaveContinueResponse{
MessageID: "",
Model: uctypes.DefaultOpenAIModel,
Expand Down
3 changes: 2 additions & 1 deletion pkg/waveai/openaibackend.go
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,8 @@ func isReasoningModel(model string) bool {
return strings.HasPrefix(m, "o1") ||
strings.HasPrefix(m, "o3") ||
strings.HasPrefix(m, "o4") ||
strings.HasPrefix(m, "gpt-5")
		strings.HasPrefix(m, "gpt-5") // already matches "gpt-5.1", "gpt-5-mini", etc. — a separate "gpt-5.1" check is dead code
}

func setApiType(opts *wshrpc.WaveAIOptsType, clientConfig *openaiapi.ClientConfig) error {
Expand Down
Loading