diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs index 805b4b33b20a..cda0399f5e28 100644 --- a/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs +++ b/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs @@ -65,5 +65,5 @@ public static RunCreationOptions GenerateOptions(OpenAIAssistantDefinition defin => setting.HasValue && (!agentSetting.HasValue || !EqualityComparer.Default.Equals(setting.Value, agentSetting.Value)) ? setting.Value : - null; + agentSetting; } diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs index 28d1ad0779ce..b9a9c62dcbe4 100644 --- a/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs +++ b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs @@ -28,13 +28,6 @@ internal static class AssistantThreadActions RunStatus.Cancelling, ]; - private static readonly HashSet s_terminalStatuses = - [ - RunStatus.Expired, - RunStatus.Failed, - RunStatus.Cancelled, - ]; - /// /// Create a new assistant thread. /// @@ -199,7 +192,7 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist await PollRunStatusAsync().ConfigureAwait(false); // Is in terminal state? - if (s_terminalStatuses.Contains(run.Status)) + if (run.Status.IsTerminal && run.Status != RunStatus.Completed) { throw new KernelException($"Agent Failure - Run terminated: {run.Status} [{run.Id}]: {run.LastError?.Message ?? "Unknown"}"); } @@ -487,7 +480,7 @@ public static async IAsyncEnumerable InvokeStreamin } // Is in terminal state? - if (s_terminalStatuses.Contains(run.Status)) + if (run.Status.IsTerminal && run.Status != RunStatus.Completed) { throw new KernelException($"Agent Failure - Run terminated: {run.Status} [{run.Id}]: {run.LastError?.Message ?? 
"Unknown"}"); } diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs index a4a0f491e177..15fd0d6aa5ae 100644 --- a/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs +++ b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs @@ -39,9 +39,9 @@ public void AssistantRunOptionsFactoryExecutionOptionsNullTest() Assert.NotNull(options); Assert.Empty(options.AdditionalMessages); Assert.Null(options.InstructionsOverride); - Assert.Null(options.Temperature); Assert.Null(options.NucleusSamplingFactor); Assert.Equal("test", options.AdditionalInstructions); + Assert.Equal(0.5F, options.Temperature); Assert.Empty(options.Metadata); } @@ -69,9 +69,9 @@ public void AssistantRunOptionsFactoryExecutionOptionsEquivalentTest() // Assert Assert.NotNull(options); - Assert.Equal("test", options.InstructionsOverride); - Assert.Null(options.Temperature); Assert.Null(options.NucleusSamplingFactor); + Assert.Equal("test", options.InstructionsOverride); + Assert.Equal(0.5F, options.Temperature); } /// @@ -174,4 +174,31 @@ public void AssistantRunOptionsFactoryExecutionOptionsMessagesTest() // Assert Assert.Single(options.AdditionalMessages); } + + /// + /// Verify run options generation with max token limits. 
+ /// + [Fact] + public void AssistantRunOptionsFactoryExecutionOptionsMaxTokensTest() + { + // Arrange + OpenAIAssistantDefinition definition = + new("gpt-anything") + { + Temperature = 0.5F, + ExecutionOptions = + new() + { + MaxCompletionTokens = 4096, + MaxPromptTokens = 1024, + }, + }; + + // Act + RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null, null); + + // Assert + Assert.Equal(1024, options.MaxInputTokenCount); + Assert.Equal(4096, options.MaxOutputTokenCount); + } } diff --git a/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs b/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs index 43b133d87ddc..2bf263b741d3 100644 --- a/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs +++ b/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs @@ -127,6 +127,44 @@ await OpenAIAssistantAgent.CreateAsync( finally { await agent.DeleteThreadAsync(threadId); + await agent.DeleteAsync(); + } + } + + /// + /// Integration test for enforcing max completion tokens + /// and targeting Azure OpenAI services. + /// + [RetryFact(typeof(HttpOperationException))] + public async Task AzureOpenAIAssistantAgentTokensAsync() + { + var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + Assert.NotNull(azureOpenAIConfiguration); + + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(azureOpenAIConfiguration.Endpoint)), + new(azureOpenAIConfiguration.ChatDeploymentName!) + { + Instructions = "Repeat the user all of the user messages", + ExecutionOptions = new() + { + MaxCompletionTokens = 16, + } + }, + new Kernel()); + + string threadId = await agent.CreateThreadAsync(); + ChatMessageContent functionResultMessage = new(AuthorRole.User, "A long time ago there lived a king who was famed for his wisdom through all the land. 
Nothing was hidden from him, and it seemed as if news of the most secret things was brought to him through the air. But he had a strange custom; every day after dinner, when the table was cleared, and no one else was present, a trusty servant had to bring him one more dish. It was covered, however, and even the servant did not know what was in it, neither did anyone know, for the king never took off the cover to eat of it until he was quite alone."); + try + { + await agent.AddChatMessageAsync(threadId, functionResultMessage); + await Assert.ThrowsAsync(() => agent.InvokeAsync(threadId).ToArrayAsync().AsTask()); + } + finally + { + await agent.DeleteThreadAsync(threadId); + await agent.DeleteAsync(); } }