diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs
index fe2f8e8c2e40..22fb6dbd82f5 100644
--- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs
+++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs
@@ -107,6 +107,36 @@ public async Task ChatPromptWithInnerContentAsync()
OutputInnerContent(replyInnerContent!);
}
+ /// <summary>
+ /// Demonstrates how you can store the output of a chat completion request for use in the OpenAI model distillation or evals products.
+ /// </summary>
+ /// <remarks>
+ /// This sample adds metadata to the chat completion request which allows the requests to be filtered in the OpenAI dashboard.
+ /// </remarks>
+ [Fact]
+ public async Task ChatPromptStoreWithMetadataAsync()
+ {
+ Assert.NotNull(TestConfiguration.OpenAI.ChatModelId);
+ Assert.NotNull(TestConfiguration.OpenAI.ApiKey);
+
+ StringBuilder chatPrompt = new("""
<message role="system">You are a librarian, expert about books</message>
<message role="user">Hi, I'm looking for book suggestions about Artificial Intelligence</message>
+ """);
+
+ var kernel = Kernel.CreateBuilder()
+ .AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey)
+ .Build();
+
+ var functionResult = await kernel.InvokePromptAsync(chatPrompt.ToString(),
+ new(new OpenAIPromptExecutionSettings { Store = true, Metadata = new Dictionary<string, string>() { { "concept", "chatcompletion" } } }));
+
+ var messageContent = functionResult.GetValue<ChatMessageContent>(); // Retrieves underlying chat message content from FunctionResult.
+ var replyInnerContent = messageContent!.InnerContent as OpenAI.Chat.ChatCompletion; // Retrieves inner content from ChatMessageContent.
+
+ OutputInnerContent(replyInnerContent!);
+ }
+
private async Task StartChatAsync(IChatCompletionService chatGPT)
{
Console.WriteLine("Chat content:");
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/AzureOpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/AzureOpenAIPromptExecutionSettingsTests.cs
index d8ff5b1e0d79..6b4b16c574af 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/AzureOpenAIPromptExecutionSettingsTests.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/AzureOpenAIPromptExecutionSettingsTests.cs
@@ -36,6 +36,8 @@ public void ItCreatesOpenAIExecutionSettingsWithCorrectDefaults()
Assert.Null(executionSettings.Logprobs);
Assert.Null(executionSettings.AzureChatDataSource);
Assert.Equal(maxTokensSettings, executionSettings.MaxTokens);
+ Assert.Null(executionSettings.Store);
+ Assert.Null(executionSettings.Metadata);
}
[Fact]
@@ -54,6 +56,9 @@ public void ItUsesExistingOpenAIExecutionSettings()
Logprobs = true,
TopLogprobs = 5,
TokenSelectionBiases = new Dictionary<int, int>() { { 1, 2 }, { 3, 4 } },
+ Seed = 123456,
+ Store = true,
+ Metadata = new Dictionary<string, string>() { { "foo", "bar" } }
};
// Act
@@ -61,6 +66,14 @@ public void ItUsesExistingOpenAIExecutionSettings()
// Assert
Assert.Equal(actualSettings, executionSettings);
+ Assert.Equal(actualSettings.MaxTokens, executionSettings.MaxTokens);
+ Assert.Equal(actualSettings.Logprobs, executionSettings.Logprobs);
+ Assert.Equal(actualSettings.TopLogprobs, executionSettings.TopLogprobs);
+ Assert.Equal(actualSettings.TokenSelectionBiases, executionSettings.TokenSelectionBiases);
+ Assert.Equal(actualSettings.Seed, executionSettings.Seed);
+ Assert.Equal(actualSettings.Store, executionSettings.Store);
+ Assert.Equal(actualSettings.Metadata, executionSettings.Metadata);
}
[Fact]
@@ -71,7 +84,9 @@ public void ItCanUseOpenAIExecutionSettings()
{
ExtensionData = new Dictionary<string, object>() {
{ "max_tokens", 1000 },
- { "temperature", 0 }
+ { "temperature", 0 },
+ { "store", true },
+ { "metadata", new Dictionary() { { "foo", "bar" } } }
}
};
@@ -82,6 +97,8 @@ public void ItCanUseOpenAIExecutionSettings()
Assert.NotNull(executionSettings);
Assert.Equal(1000, executionSettings.MaxTokens);
Assert.Equal(0, executionSettings.Temperature);
+ Assert.True(executionSettings.Store);
+ Assert.Equal(new Dictionary<string, string>() { { "foo", "bar" } }, executionSettings.Metadata);
}
[Fact]
@@ -103,6 +120,8 @@ public void ItCreatesOpenAIExecutionSettingsFromExtraPropertiesSnakeCase()
{ "seed", 123456 },
{ "logprobs", true },
{ "top_logprobs", 5 },
+ { "store", true },
+ { "metadata", new Dictionary() { { "foo", "bar" } } }
}
};
@@ -131,7 +150,9 @@ public void ItCreatesOpenAIExecutionSettingsFromExtraPropertiesAsStrings()
{ "token_selection_biases", new Dictionary() { { "1", "2" }, { "3", "4" } } },
{ "seed", 123456 },
{ "logprobs", true },
- { "top_logprobs", 5 }
+ { "top_logprobs", 5 },
+ { "store", true },
+ { "metadata", new Dictionary() { { "foo", "bar" } } }
}
};
@@ -158,7 +179,9 @@ public void ItCreatesOpenAIExecutionSettingsFromJsonSnakeCase()
"max_tokens": 128,
"seed": 123456,
"logprobs": true,
- "top_logprobs": 5
+ "top_logprobs": 5,
+ "store": true,
+ "metadata": { "foo": "bar" }
}
""";
var actualSettings = JsonSerializer.Deserialize<AzureOpenAIPromptExecutionSettings>(json);
@@ -217,7 +240,9 @@ public void PromptExecutionSettingsFreezeWorksAsExpected()
"presence_penalty": 0.0,
"frequency_penalty": 0.0,
"stop_sequences": [ "DONE" ],
- "token_selection_biases": { "1": 2, "3": 4 }
+ "token_selection_biases": { "1": 2, "3": 4 },
+ "store": true,
+ "metadata": { "foo": "bar" }
}
""";
var executionSettings = JsonSerializer.Deserialize<AzureOpenAIPromptExecutionSettings>(configPayload);
@@ -232,6 +257,8 @@ public void PromptExecutionSettingsFreezeWorksAsExpected()
Assert.Throws<InvalidOperationException>(() => executionSettings.TopP = 1);
Assert.Throws<InvalidOperationException>(() => executionSettings.StopSequences?.Add("STOP"));
Assert.Throws<InvalidOperationException>(() => executionSettings.TokenSelectionBiases?.Add(5, 6));
+ Assert.Throws<InvalidOperationException>(() => executionSettings.Store = false);
+ Assert.Throws<InvalidOperationException>(() => executionSettings.Metadata?.Add("bar", "foo"));
executionSettings!.Freeze(); // idempotent
Assert.True(executionSettings.IsFrozen);
@@ -267,7 +294,9 @@ public void ItCanCreateAzureOpenAIPromptExecutionSettingsFromOpenAIPromptExecuti
Logprobs = true,
Seed = 123456,
TopLogprobs = 5,
- ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions
+ ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions,
+ Store = true,
+ Metadata = new Dictionary<string, string>() { { "foo", "bar" } }
};
// Act
@@ -307,5 +336,7 @@ private static void AssertExecutionSettings(AzureOpenAIPromptExecutionSettings e
Assert.Equal(123456, executionSettings.Seed);
Assert.Equal(true, executionSettings.Logprobs);
Assert.Equal(5, executionSettings.TopLogprobs);
+ Assert.Equal(true, executionSettings.Store);
+ Assert.Equal(new Dictionary<string, string>() { { "foo", "bar" } }, executionSettings.Metadata);
}
}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs
index 63d46c7c77e2..bf7859815f1d 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs
@@ -49,6 +49,7 @@ protected override ChatCompletionOptions CreateChatCompletionOptions(
EndUserId = executionSettings.User,
TopLogProbabilityCount = executionSettings.TopLogprobs,
IncludeLogProbabilities = executionSettings.Logprobs,
+ StoredOutputEnabled = executionSettings.Store,
};
var responseFormat = GetResponseFormat(executionSettings);
@@ -90,6 +91,14 @@ protected override ChatCompletionOptions CreateChatCompletionOptions(
}
}
+ if (executionSettings.Metadata is not null)
+ {
+ foreach (var kvp in executionSettings.Metadata)
+ {
+ options.Metadata.Add(kvp.Key, kvp.Value);
+ }
+ }
+
if (toolCallingConfig.Options?.AllowParallelCalls is not null)
{
options.AllowParallelToolCalls = toolCallingConfig.Options.AllowParallelCalls;
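
Note: AzureOpenAIPromptExecutionSettings inherits Store and Metadata from OpenAIPromptExecutionSettings, so the Azure connector is opted in the same way. A minimal sketch, assuming placeholder deployment, endpoint, and key values rather than anything from this diff:

    var kernel = Kernel.CreateBuilder()
        .AddAzureOpenAIChatCompletion("my-deployment", "https://my-resource.openai.azure.com/", apiKey) // placeholders
        .Build();

    var settings = new AzureOpenAIPromptExecutionSettings
    {
        Store = true, // surfaces as ChatCompletionOptions.StoredOutputEnabled above
        Metadata = new Dictionary<string, string> { { "concept", "chatcompletion" } } // copied into options.Metadata above
    };
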
diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs
index 567c77babeea..90272b94717c 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs
@@ -32,6 +32,8 @@ public void ItCreatesOpenAIExecutionSettingsWithCorrectDefaults()
Assert.Null(executionSettings.TopLogprobs);
Assert.Null(executionSettings.Logprobs);
Assert.Equal(128, executionSettings.MaxTokens);
+ Assert.Null(executionSettings.Store);
+ Assert.Null(executionSettings.Metadata);
}
[Fact]
@@ -44,12 +46,15 @@ public void ItUsesExistingOpenAIExecutionSettings()
TopP = 0.7,
FrequencyPenalty = 0.7,
PresencePenalty = 0.7,
- StopSequences = new string[] { "foo", "bar" },
+ StopSequences = ["foo", "bar"],
ChatSystemPrompt = "chat system prompt",
MaxTokens = 128,
Logprobs = true,
TopLogprobs = 5,
TokenSelectionBiases = new Dictionary<int, int>() { { 1, 2 }, { 3, 4 } },
+ Seed = 123456,
+ Store = true,
+ Metadata = new Dictionary<string, string>() { { "foo", "bar" } }
};
// Act
@@ -58,7 +63,13 @@ public void ItUsesExistingOpenAIExecutionSettings()
// Assert
Assert.NotNull(executionSettings);
Assert.Equal(actualSettings, executionSettings);
- Assert.Equal(128, executionSettings.MaxTokens);
+ Assert.Equal(actualSettings.MaxTokens, executionSettings.MaxTokens);
+ Assert.Equal(actualSettings.Logprobs, executionSettings.Logprobs);
+ Assert.Equal(actualSettings.TopLogprobs, executionSettings.TopLogprobs);
+ Assert.Equal(actualSettings.TokenSelectionBiases, executionSettings.TokenSelectionBiases);
+ Assert.Equal(actualSettings.Seed, executionSettings.Seed);
+ Assert.Equal(actualSettings.Store, executionSettings.Store);
+ Assert.Equal(actualSettings.Metadata, executionSettings.Metadata);
}
[Fact]
@@ -69,7 +80,9 @@ public void ItCanUseOpenAIExecutionSettings()
{
ExtensionData = new Dictionary<string, object>() {
{ "max_tokens", 1000 },
- { "temperature", 0 }
+ { "temperature", 0 },
+ { "store", true },
+ { "metadata", new Dictionary() { { "foo", "bar" } } }
}
};
@@ -80,6 +93,8 @@ public void ItCanUseOpenAIExecutionSettings()
Assert.NotNull(executionSettings);
Assert.Equal(1000, executionSettings.MaxTokens);
Assert.Equal(0, executionSettings.Temperature);
+ Assert.True(executionSettings.Store);
+ Assert.Equal(new Dictionary<string, string>() { { "foo", "bar" } }, executionSettings.Metadata);
}
[Fact]
@@ -102,6 +117,8 @@ public void ItCreatesOpenAIExecutionSettingsFromExtraPropertiesSnakeCase()
{ "seed", 123456 },
{ "logprobs", true },
{ "top_logprobs", 5 },
+ { "store", true },
+ { "metadata", new Dictionary() { { "foo", "bar" } } }
}
};
@@ -131,7 +148,9 @@ public void ItCreatesOpenAIExecutionSettingsFromExtraPropertiesAsStrings()
{ "token_selection_biases", new Dictionary() { { "1", "2" }, { "3", "4" } } },
{ "seed", 123456 },
{ "logprobs", true },
- { "top_logprobs", 5 }
+ { "top_logprobs", 5 },
+ { "store", true },
+ { "metadata", new Dictionary() { { "foo", "bar" } } }
}
};
@@ -159,7 +178,9 @@ public void ItCreatesOpenAIExecutionSettingsFromJsonSnakeCase()
"max_tokens": 128,
"seed": 123456,
"logprobs": true,
- "top_logprobs": 5
+ "top_logprobs": 5,
+ "store": true,
+ "metadata": { "foo": "bar" }
}
""";
var actualSettings = JsonSerializer.Deserialize<OpenAIPromptExecutionSettings>(json);
@@ -219,7 +240,12 @@ public void PromptExecutionSettingsFreezeWorksAsExpected()
"presence_penalty": 0.0,
"frequency_penalty": 0.0,
"stop_sequences": [ "DONE" ],
- "token_selection_biases": { "1": 2, "3": 4 }
+ "token_selection_biases": { "1": 2, "3": 4 },
+ "seed": 123456,
+ "logprobs": true,
+ "top_logprobs": 5,
+ "store": true,
+ "metadata": { "foo": "bar" }
}
""";
var executionSettings = JsonSerializer.Deserialize<OpenAIPromptExecutionSettings>(configPayload);
@@ -234,6 +260,11 @@ public void PromptExecutionSettingsFreezeWorksAsExpected()
Assert.Throws<InvalidOperationException>(() => executionSettings.TopP = 1);
Assert.Throws<InvalidOperationException>(() => executionSettings.StopSequences?.Add("STOP"));
Assert.Throws<InvalidOperationException>(() => executionSettings.TokenSelectionBiases?.Add(5, 6));
+ Assert.Throws<InvalidOperationException>(() => executionSettings.Seed = 654321);
+ Assert.Throws<InvalidOperationException>(() => executionSettings.Logprobs = false);
+ Assert.Throws<InvalidOperationException>(() => executionSettings.TopLogprobs = 10);
+ Assert.Throws<InvalidOperationException>(() => executionSettings.Store = false);
+ Assert.Throws<InvalidOperationException>(() => executionSettings.Metadata?.Add("bar", "baz"));
executionSettings!.Freeze(); // idempotent
Assert.True(executionSettings.IsFrozen);
@@ -285,5 +316,7 @@ private static void AssertExecutionSettings(OpenAIPromptExecutionSettings execut
Assert.Equal(123456, executionSettings.Seed);
Assert.Equal(true, executionSettings.Logprobs);
Assert.Equal(5, executionSettings.TopLogprobs);
+ Assert.Equal(true, executionSettings.Store);
+ Assert.Equal(new Dictionary<string, string>() { { "foo", "bar" } }, executionSettings.Metadata);
}
}
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs
index 9d03c3322964..b14e7b2f1c89 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs
@@ -456,7 +456,8 @@ protected virtual ChatCompletionOptions CreateChatCompletionOptions(
#pragma warning restore OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
EndUserId = executionSettings.User,
TopLogProbabilityCount = executionSettings.TopLogprobs,
- IncludeLogProbabilities = executionSettings.Logprobs
+ IncludeLogProbabilities = executionSettings.Logprobs,
+ StoredOutputEnabled = executionSettings.Store,
};
var responseFormat = GetResponseFormat(executionSettings);
@@ -496,6 +497,14 @@ protected virtual ChatCompletionOptions CreateChatCompletionOptions(
options.AllowParallelToolCalls = toolCallingConfig.Options.AllowParallelCalls;
}
+ if (executionSettings.Metadata is not null)
+ {
+ foreach (var kvp in executionSettings.Metadata)
+ {
+ options.Metadata.Add(kvp.Key, kvp.Value);
+ }
+ }
+
return options;
}
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs
index 3a5e632b7664..add62d564046 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs
@@ -289,6 +289,40 @@ public int? TopLogprobs
}
}
+ /// <summary>
+ /// Developer-defined tags and values used for filtering completions in the OpenAI dashboard.
+ /// </summary>
+ [Experimental("SKEXP0010")]
+ [JsonPropertyName("metadata")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public IDictionary<string, string>? Metadata
+ {
+ get => this._metadata;
+
+ set
+ {
+ this.ThrowIfFrozen();
+ this._metadata = value;
+ }
+ }
+
+ /// <summary>
+ /// Whether or not to store the output of this chat completion request for use in the OpenAI model distillation or evals products.
+ /// </summary>
+ [Experimental("SKEXP0010")]
+ [JsonPropertyName("store")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public bool? Store
+ {
+ get => this._store;
+
+ set
+ {
+ this.ThrowIfFrozen();
+ this._store = value;
+ }
+ }
+
/// <inheritdoc/>
public override void Freeze()
{
@@ -308,6 +342,11 @@ public override void Freeze()
{
this._tokenSelectionBiases = new ReadOnlyDictionary<int, int>(this._tokenSelectionBiases);
}
+
+ if (this._metadata is not null)
+ {
+ this._metadata = new ReadOnlyDictionary<string, string>(this._metadata);
+ }
}
/// <inheritdoc/>
@@ -372,7 +411,9 @@ public static OpenAIPromptExecutionSettings FromExecutionSettings(PromptExecutio
User = this.User,
ChatSystemPrompt = this.ChatSystemPrompt,
Logprobs = this.Logprobs,
- TopLogprobs = this.TopLogprobs
+ TopLogprobs = this.TopLogprobs,
+ Store = this.Store,
+ Metadata = this.Metadata is not null ? new Dictionary<string, string>(this.Metadata) : null,
};
}
@@ -392,6 +433,8 @@ public static OpenAIPromptExecutionSettings FromExecutionSettings(PromptExecutio
private string? _chatSystemPrompt;
private bool? _logprobs;
private int? _topLogprobs;
+ private bool? _store;
+ private IDictionary<string, string>? _metadata;
#endregion
}
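
A minimal end-to-end sketch of the new settings on the OpenAI side, reusing the TestConfiguration values from the sample at the top of this diff; the prompt text is illustrative:

    var kernel = Kernel.CreateBuilder()
        .AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey)
        .Build();

    var settings = new OpenAIPromptExecutionSettings
    {
        Store = true, // store this completion for use with OpenAI distillation/evals
        Metadata = new Dictionary<string, string> { { "concept", "chatcompletion" } } // filterable tags in the OpenAI dashboard
    };

    var result = await kernel.InvokePromptAsync("Suggest a book about Artificial Intelligence.", new(settings));
    Console.WriteLine(result.GetValue<string>());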