From b15fa519b39b7334649ee491be78b811f27bdc32 Mon Sep 17 00:00:00 2001 From: "Chia-Yang (Justin) Huang" Date: Thu, 4 Dec 2025 18:21:33 -0800 Subject: [PATCH 1/4] .NET: feat: support none reasoning effort for OpenAI --- .../Services/AzureOpenAIChatCompletionServiceTests.cs | 1 + .../Services/OpenAIChatCompletionServiceTests.cs | 1 + .../Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs | 3 ++- 3 files changed, 4 insertions(+), 1 deletion(-) diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs index a3936414818f..22bc7c956883 100644 --- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs @@ -418,6 +418,7 @@ public async Task GetChatMessageContentsRequestHandlesInternalFieldsCorrectlyAsy [Theory] [InlineData(null, null)] + [InlineData("string", "none")] [InlineData("string", "low")] [InlineData("string", "medium")] [InlineData("string", "high")] diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs index 397c9bb0e39d..a84a8aa997c1 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs @@ -1007,6 +1007,7 @@ public async Task GetChatMessageInResponseFormatsAsync(string formatType, string [Theory] [InlineData(null, null)] + [InlineData("string", "none")] [InlineData("string", "low")] [InlineData("string", "medium")] [InlineData("string", "high")] diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs 
b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs index 3387601ed189..97da39daf0d6 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs @@ -568,10 +568,11 @@ protected virtual ChatCompletionOptions CreateChatCompletionOptions( { return textEffortLevel.ToUpperInvariant() switch { + "NONE" => new("none"), + "MINIMAL" => ChatReasoningEffortLevel.Minimal, "LOW" => ChatReasoningEffortLevel.Low, "MEDIUM" => ChatReasoningEffortLevel.Medium, "HIGH" => ChatReasoningEffortLevel.High, - "MINIMAL" => new("minimal"), _ => throw new NotSupportedException($"The provided reasoning effort '{textEffortLevel}' is not supported.") }; } From 6678a638c059c619f5604ce90bea2bd2dc9f3629 Mon Sep 17 00:00:00 2001 From: "Chia-Yang (Justin) Huang" Date: Thu, 4 Dec 2025 18:27:35 -0800 Subject: [PATCH 2/4] .NET: docs: add none to param option --- .../Settings/OpenAIPromptExecutionSettings.cs | 75 ++++++++++--------- 1 file changed, 39 insertions(+), 36 deletions(-) diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs index 5824fe412f84..957d2398b2ec 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs @@ -26,7 +26,7 @@ public class OpenAIPromptExecutionSettings : PromptExecutionSettings /// Constrains effort on reasoning for reasoning models. /// Reducing reasoning effort can result in faster responses and fewer tokens used on reasoning in a response. /// Possible values are: - /// - values: "low", "medium", "high", "minimal"; + /// - values: "none", "minimal", "low", "medium", "high"; - object; /// [JsonPropertyName("reasoning_effort")] @@ -51,7 +51,6 @@ public object? ReasoningEffort public double? 
Temperature { get => this._temperature; - set { this.ThrowIfFrozen(); @@ -69,7 +68,6 @@ public double? Temperature public double? TopP { get => this._topP; - set { this.ThrowIfFrozen(); @@ -87,7 +85,6 @@ public double? TopP public double? PresencePenalty { get => this._presencePenalty; - set { this.ThrowIfFrozen(); @@ -105,7 +102,6 @@ public double? PresencePenalty public double? FrequencyPenalty { get => this._frequencyPenalty; - set { this.ThrowIfFrozen(); @@ -121,7 +117,6 @@ public double? FrequencyPenalty public int? MaxTokens { get => this._maxTokens; - set { this.ThrowIfFrozen(); @@ -137,7 +132,6 @@ public int? MaxTokens public IList? StopSequences { get => this._stopSequences; - set { this.ThrowIfFrozen(); @@ -154,7 +148,6 @@ public IList? StopSequences public long? Seed { get => this._seed; - set { this.ThrowIfFrozen(); @@ -180,7 +173,6 @@ public long? Seed public object? ResponseFormat { get => this._responseFormat; - set { this.ThrowIfFrozen(); @@ -197,7 +189,6 @@ public object? ResponseFormat public string? ChatSystemPrompt { get => this._chatSystemPrompt; - set { this.ThrowIfFrozen(); @@ -215,7 +206,6 @@ public string? ChatSystemPrompt public string? ChatDeveloperPrompt { get => this._chatDeveloperPrompt; - set { this.ThrowIfFrozen(); @@ -231,7 +221,6 @@ public string? ChatDeveloperPrompt public IDictionary? TokenSelectionBiases { get => this._tokenSelectionBiases; - set { this.ThrowIfFrozen(); @@ -272,7 +261,6 @@ public IDictionary? TokenSelectionBiases public ToolCallBehavior? ToolCallBehavior { get => this._toolCallBehavior; - set { this.ThrowIfFrozen(); @@ -288,7 +276,6 @@ public ToolCallBehavior? ToolCallBehavior public string? User { get => this._user; - set { this.ThrowIfFrozen(); @@ -306,7 +293,6 @@ public string? User public bool? Logprobs { get => this._logprobs; - set { this.ThrowIfFrozen(); @@ -322,7 +308,6 @@ public bool? Logprobs public int? 
TopLogprobs { get => this._topLogprobs; - set { this.ThrowIfFrozen(); @@ -338,7 +323,6 @@ public int? TopLogprobs public IDictionary? Metadata { get => this._metadata; - set { this.ThrowIfFrozen(); @@ -355,7 +339,6 @@ public IDictionary? Metadata public bool? Store { get => this._store; - set { this.ThrowIfFrozen(); @@ -385,7 +368,6 @@ public bool? Store public object? WebSearchOptions { get => this._webSearchOptions; - set { this.ThrowIfFrozen(); @@ -413,7 +395,6 @@ public object? WebSearchOptions public object? Modalities { get => this._responseModalities; - set { this.ThrowIfFrozen(); @@ -439,7 +420,6 @@ public object? Modalities public object? Audio { get => this._audioOptions; - set { this.ThrowIfFrozen(); @@ -464,7 +444,9 @@ public override void Freeze() if (this._tokenSelectionBiases is not null) { - this._tokenSelectionBiases = new ReadOnlyDictionary(this._tokenSelectionBiases); + this._tokenSelectionBiases = new ReadOnlyDictionary( + this._tokenSelectionBiases + ); } if (this._metadata is not null) @@ -485,14 +467,14 @@ public override PromptExecutionSettings Clone() /// Template configuration /// Default max tokens /// An instance of OpenAIPromptExecutionSettings - public static OpenAIPromptExecutionSettings FromExecutionSettings(PromptExecutionSettings? executionSettings, int? defaultMaxTokens = null) + public static OpenAIPromptExecutionSettings FromExecutionSettings( + PromptExecutionSettings? executionSettings, + int? 
defaultMaxTokens = null + ) { if (executionSettings is null) { - return new OpenAIPromptExecutionSettings() - { - MaxTokens = defaultMaxTokens - }; + return new OpenAIPromptExecutionSettings() { MaxTokens = defaultMaxTokens }; } if (executionSettings is OpenAIPromptExecutionSettings settings) @@ -502,7 +484,10 @@ public static OpenAIPromptExecutionSettings FromExecutionSettings(PromptExecutio var json = JsonSerializer.Serialize(executionSettings); - var openAIExecutionSettings = JsonSerializer.Deserialize(json, JsonOptionsCache.ReadPermissive); + var openAIExecutionSettings = JsonSerializer.Deserialize( + json, + JsonOptionsCache.ReadPermissive + ); // Restore the function choice behavior that lost internal state(list of function instances) during serialization/deserialization process. openAIExecutionSettings!.FunctionChoiceBehavior = executionSettings.FunctionChoiceBehavior; @@ -515,21 +500,28 @@ public static OpenAIPromptExecutionSettings FromExecutionSettings(PromptExecutio /// /// The type of the settings object to clone. /// A new instance of the settings object. - protected internal T Clone() where T : OpenAIPromptExecutionSettings, new() + protected internal T Clone() + where T : OpenAIPromptExecutionSettings, new() { return new T() { ModelId = this.ModelId, - ExtensionData = this.ExtensionData is not null ? new Dictionary(this.ExtensionData) : null, + ExtensionData = this.ExtensionData is not null + ? new Dictionary(this.ExtensionData) + : null, Temperature = this.Temperature, TopP = this.TopP, PresencePenalty = this.PresencePenalty, FrequencyPenalty = this.FrequencyPenalty, MaxTokens = this.MaxTokens, - StopSequences = this.StopSequences is not null ? new List(this.StopSequences) : null, + StopSequences = this.StopSequences is not null + ? new List(this.StopSequences) + : null, Seed = this.Seed, ResponseFormat = this.ResponseFormat, - TokenSelectionBiases = this.TokenSelectionBiases is not null ? 
new Dictionary(this.TokenSelectionBiases) : null, + TokenSelectionBiases = this.TokenSelectionBiases is not null + ? new Dictionary(this.TokenSelectionBiases) + : null, ToolCallBehavior = this.ToolCallBehavior, FunctionChoiceBehavior = this.FunctionChoiceBehavior, User = this.User, @@ -538,7 +530,9 @@ public static OpenAIPromptExecutionSettings FromExecutionSettings(PromptExecutio Logprobs = this.Logprobs, TopLogprobs = this.TopLogprobs, Store = this.Store, - Metadata = this.Metadata is not null ? new Dictionary(this.Metadata) : null, + Metadata = this.Metadata is not null + ? new Dictionary(this.Metadata) + : null, ReasoningEffort = this.ReasoningEffort, WebSearchOptions = this.WebSearchOptions, Modalities = this.Modalities, @@ -550,12 +544,21 @@ public static OpenAIPromptExecutionSettings FromExecutionSettings(PromptExecutio protected override ChatHistory PrepareChatHistoryForRequest(ChatHistory chatHistory) { // Inserts system and developer prompts at the beginning of the chat history if they are not already present. 
- if (!string.IsNullOrWhiteSpace(this.ChatDeveloperPrompt) && !chatHistory.Any(m => m.Role == AuthorRole.Developer)) + if ( + !string.IsNullOrWhiteSpace(this.ChatDeveloperPrompt) + && !chatHistory.Any(m => m.Role == AuthorRole.Developer) + ) { - chatHistory.Insert(0, new ChatMessageContent(AuthorRole.Developer, this.ChatDeveloperPrompt)); + chatHistory.Insert( + 0, + new ChatMessageContent(AuthorRole.Developer, this.ChatDeveloperPrompt) + ); } - if (!string.IsNullOrWhiteSpace(this.ChatSystemPrompt) && !chatHistory.Any(m => m.Role == AuthorRole.System)) + if ( + !string.IsNullOrWhiteSpace(this.ChatSystemPrompt) + && !chatHistory.Any(m => m.Role == AuthorRole.System) + ) { chatHistory.Insert(0, new ChatMessageContent(AuthorRole.System, this.ChatSystemPrompt)); } From 003baa85693592a5211f8953cf9bd1dbbe15ec22 Mon Sep 17 00:00:00 2001 From: "Chia-Yang (Justin) Huang" Date: Thu, 4 Dec 2025 18:28:37 -0800 Subject: [PATCH 3/4] .NET: test: reorder test set --- .../Services/AzureOpenAIChatCompletionServiceTests.cs | 2 +- .../Services/OpenAIChatCompletionServiceTests.cs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs index 22bc7c956883..17622af543a3 100644 --- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs @@ -419,10 +419,10 @@ public async Task GetChatMessageContentsRequestHandlesInternalFieldsCorrectlyAsy [Theory] [InlineData(null, null)] [InlineData("string", "none")] + [InlineData("string", "minimal")] [InlineData("string", "low")] [InlineData("string", "medium")] [InlineData("string", "high")] - [InlineData("string", "minimal")] [InlineData("ChatReasonEffortLevel.Low", 
"low")] [InlineData("ChatReasonEffortLevel.Medium", "medium")] [InlineData("ChatReasonEffortLevel.High", "high")] diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs index a84a8aa997c1..f62a04c641b7 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs @@ -1008,10 +1008,10 @@ public async Task GetChatMessageInResponseFormatsAsync(string formatType, string [Theory] [InlineData(null, null)] [InlineData("string", "none")] + [InlineData("string", "minimal")] [InlineData("string", "low")] [InlineData("string", "medium")] [InlineData("string", "high")] - [InlineData("string", "minimal")] [InlineData("ChatReasonEffortLevel.Low", "low")] [InlineData("ChatReasonEffortLevel.Medium", "medium")] [InlineData("ChatReasonEffortLevel.High", "high")] From 06512dab9657623db294b6d42070d96a6834af61 Mon Sep 17 00:00:00 2001 From: "Chia-Yang (Justin) Huang" Date: Thu, 4 Dec 2025 18:35:13 -0800 Subject: [PATCH 4/4] .NET: style: revert format --- .../Settings/OpenAIPromptExecutionSettings.cs | 73 +++++++++---------- 1 file changed, 35 insertions(+), 38 deletions(-) diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs index 957d2398b2ec..e8406758210e 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs @@ -51,6 +51,7 @@ public object? ReasoningEffort public double? Temperature { get => this._temperature; + set { this.ThrowIfFrozen(); @@ -68,6 +69,7 @@ public double? Temperature public double? 
TopP { get => this._topP; + set { this.ThrowIfFrozen(); @@ -85,6 +87,7 @@ public double? TopP public double? PresencePenalty { get => this._presencePenalty; + set { this.ThrowIfFrozen(); @@ -102,6 +105,7 @@ public double? PresencePenalty public double? FrequencyPenalty { get => this._frequencyPenalty; + set { this.ThrowIfFrozen(); @@ -117,6 +121,7 @@ public double? FrequencyPenalty public int? MaxTokens { get => this._maxTokens; + set { this.ThrowIfFrozen(); @@ -132,6 +137,7 @@ public int? MaxTokens public IList? StopSequences { get => this._stopSequences; + set { this.ThrowIfFrozen(); @@ -148,6 +154,7 @@ public IList? StopSequences public long? Seed { get => this._seed; + set { this.ThrowIfFrozen(); @@ -173,6 +180,7 @@ public long? Seed public object? ResponseFormat { get => this._responseFormat; + set { this.ThrowIfFrozen(); @@ -189,6 +197,7 @@ public object? ResponseFormat public string? ChatSystemPrompt { get => this._chatSystemPrompt; + set { this.ThrowIfFrozen(); @@ -206,6 +215,7 @@ public string? ChatSystemPrompt public string? ChatDeveloperPrompt { get => this._chatDeveloperPrompt; + set { this.ThrowIfFrozen(); @@ -221,6 +231,7 @@ public string? ChatDeveloperPrompt public IDictionary? TokenSelectionBiases { get => this._tokenSelectionBiases; + set { this.ThrowIfFrozen(); @@ -261,6 +272,7 @@ public IDictionary? TokenSelectionBiases public ToolCallBehavior? ToolCallBehavior { get => this._toolCallBehavior; + set { this.ThrowIfFrozen(); @@ -276,6 +288,7 @@ public ToolCallBehavior? ToolCallBehavior public string? User { get => this._user; + set { this.ThrowIfFrozen(); @@ -293,6 +306,7 @@ public string? User public bool? Logprobs { get => this._logprobs; + set { this.ThrowIfFrozen(); @@ -308,6 +322,7 @@ public bool? Logprobs public int? TopLogprobs { get => this._topLogprobs; + set { this.ThrowIfFrozen(); @@ -323,6 +338,7 @@ public int? TopLogprobs public IDictionary? 
Metadata { get => this._metadata; + set { this.ThrowIfFrozen(); @@ -339,6 +355,7 @@ public IDictionary? Metadata public bool? Store { get => this._store; + set { this.ThrowIfFrozen(); @@ -368,6 +385,7 @@ public bool? Store public object? WebSearchOptions { get => this._webSearchOptions; + set { this.ThrowIfFrozen(); @@ -395,6 +413,7 @@ public object? WebSearchOptions public object? Modalities { get => this._responseModalities; + set { this.ThrowIfFrozen(); @@ -420,6 +439,7 @@ public object? Modalities public object? Audio { get => this._audioOptions; + set { this.ThrowIfFrozen(); @@ -444,9 +464,7 @@ public override void Freeze() if (this._tokenSelectionBiases is not null) { - this._tokenSelectionBiases = new ReadOnlyDictionary( - this._tokenSelectionBiases - ); + this._tokenSelectionBiases = new ReadOnlyDictionary(this._tokenSelectionBiases); } if (this._metadata is not null) @@ -467,14 +485,14 @@ public override PromptExecutionSettings Clone() /// Template configuration /// Default max tokens /// An instance of OpenAIPromptExecutionSettings - public static OpenAIPromptExecutionSettings FromExecutionSettings( - PromptExecutionSettings? executionSettings, - int? defaultMaxTokens = null - ) + public static OpenAIPromptExecutionSettings FromExecutionSettings(PromptExecutionSettings? executionSettings, int? 
defaultMaxTokens = null) { if (executionSettings is null) { - return new OpenAIPromptExecutionSettings() { MaxTokens = defaultMaxTokens }; + return new OpenAIPromptExecutionSettings() + { + MaxTokens = defaultMaxTokens + }; } if (executionSettings is OpenAIPromptExecutionSettings settings) @@ -484,10 +502,7 @@ public static OpenAIPromptExecutionSettings FromExecutionSettings( var json = JsonSerializer.Serialize(executionSettings); - var openAIExecutionSettings = JsonSerializer.Deserialize( - json, - JsonOptionsCache.ReadPermissive - ); + var openAIExecutionSettings = JsonSerializer.Deserialize(json, JsonOptionsCache.ReadPermissive); // Restore the function choice behavior that lost internal state(list of function instances) during serialization/deserialization process. openAIExecutionSettings!.FunctionChoiceBehavior = executionSettings.FunctionChoiceBehavior; @@ -500,28 +515,21 @@ public static OpenAIPromptExecutionSettings FromExecutionSettings( /// /// The type of the settings object to clone. /// A new instance of the settings object. - protected internal T Clone() - where T : OpenAIPromptExecutionSettings, new() + protected internal T Clone() where T : OpenAIPromptExecutionSettings, new() { return new T() { ModelId = this.ModelId, - ExtensionData = this.ExtensionData is not null - ? new Dictionary(this.ExtensionData) - : null, + ExtensionData = this.ExtensionData is not null ? new Dictionary(this.ExtensionData) : null, Temperature = this.Temperature, TopP = this.TopP, PresencePenalty = this.PresencePenalty, FrequencyPenalty = this.FrequencyPenalty, MaxTokens = this.MaxTokens, - StopSequences = this.StopSequences is not null - ? new List(this.StopSequences) - : null, + StopSequences = this.StopSequences is not null ? new List(this.StopSequences) : null, Seed = this.Seed, ResponseFormat = this.ResponseFormat, - TokenSelectionBiases = this.TokenSelectionBiases is not null - ? 
new Dictionary(this.TokenSelectionBiases) - : null, + TokenSelectionBiases = this.TokenSelectionBiases is not null ? new Dictionary(this.TokenSelectionBiases) : null, ToolCallBehavior = this.ToolCallBehavior, FunctionChoiceBehavior = this.FunctionChoiceBehavior, User = this.User, @@ -530,9 +538,7 @@ protected internal T Clone() Logprobs = this.Logprobs, TopLogprobs = this.TopLogprobs, Store = this.Store, - Metadata = this.Metadata is not null - ? new Dictionary(this.Metadata) - : null, + Metadata = this.Metadata is not null ? new Dictionary(this.Metadata) : null, ReasoningEffort = this.ReasoningEffort, WebSearchOptions = this.WebSearchOptions, Modalities = this.Modalities, @@ -544,21 +550,12 @@ protected internal T Clone() protected override ChatHistory PrepareChatHistoryForRequest(ChatHistory chatHistory) { // Inserts system and developer prompts at the beginning of the chat history if they are not already present. - if ( - !string.IsNullOrWhiteSpace(this.ChatDeveloperPrompt) - && !chatHistory.Any(m => m.Role == AuthorRole.Developer) - ) + if (!string.IsNullOrWhiteSpace(this.ChatDeveloperPrompt) && !chatHistory.Any(m => m.Role == AuthorRole.Developer)) { - chatHistory.Insert( - 0, - new ChatMessageContent(AuthorRole.Developer, this.ChatDeveloperPrompt) - ); + chatHistory.Insert(0, new ChatMessageContent(AuthorRole.Developer, this.ChatDeveloperPrompt)); } - if ( - !string.IsNullOrWhiteSpace(this.ChatSystemPrompt) - && !chatHistory.Any(m => m.Role == AuthorRole.System) - ) + if (!string.IsNullOrWhiteSpace(this.ChatSystemPrompt) && !chatHistory.Any(m => m.Role == AuthorRole.System)) { chatHistory.Insert(0, new ChatMessageContent(AuthorRole.System, this.ChatSystemPrompt)); }