From c2895a37493fe03e776fa839cd576deb4a563553 Mon Sep 17 00:00:00 2001
From: Mark Wallace <127216156+markwallace-microsoft@users.noreply.github.com>
Date: Wed, 11 Dec 2024 21:52:21 +0000
Subject: [PATCH] Python: Add store and metadata properties to
 OpenAIPromptExecutionSettings (#9946)

### Motivation and Context

Closes #9918

### Description

### Contribution Checklist

- [ ] The code builds clean without any errors or warnings
- [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations
- [ ] All unit tests pass, and I have added new tests where possible
- [ ] I didn't break anyone :smile:

---------

Co-authored-by: Tao Chen
---
 .../simple_chatbot_store_metadata.py          | 85 +++++++++++++++++++
 .../open_ai_prompt_execution_settings.py      |  2 +
 2 files changed, 87 insertions(+)
 create mode 100644 python/samples/concepts/chat_completion/simple_chatbot_store_metadata.py

diff --git a/python/samples/concepts/chat_completion/simple_chatbot_store_metadata.py b/python/samples/concepts/chat_completion/simple_chatbot_store_metadata.py
new file mode 100644
index 000000000000..44484aed7122
--- /dev/null
+++ b/python/samples/concepts/chat_completion/simple_chatbot_store_metadata.py
@@ -0,0 +1,85 @@
+# Copyright (c) Microsoft. All rights reserved.
+
+import asyncio
+
+from samples.concepts.setup.chat_completion_services import (
+    Services,
+    get_chat_completion_service_and_request_settings,
+)
+from semantic_kernel.contents import ChatHistory
+
+# This sample shows how to create a chatbot whose output can be stored for use with the OpenAI
+# model distillation or evals products.
+# This sample uses the following two main components:
+# - a ChatCompletionService: This component is responsible for generating responses to user messages.
+# - a ChatHistory: This component is responsible for keeping track of the chat history.
+# The chatbot in this sample is called Mosscap, who is an expert in basketball.
+
+# To learn more about OpenAI distillation, see: https://platform.openai.com/docs/guides/distillation
+# To learn more about OpenAI evals, see: https://platform.openai.com/docs/guides/evals
+
+
+# You can select from the following chat completion services:
+# - Services.OPENAI
+# Please make sure you have configured your environment correctly for the selected chat completion service.
+chat_completion_service, request_settings = get_chat_completion_service_and_request_settings(Services.OPENAI)
+
+# This is the system message that gives the chatbot its personality.
+system_message = """
+You are a chat bot whose expertise is basketball.
+Your name is Mosscap and you have one goal: to answer questions about basketball.
+"""
+
+# Create a chat history object with the system message.
+chat_history = ChatHistory(system_message=system_message)
+# Configure the store and metadata settings for the chat completion service.
+request_settings.store = True
+request_settings.metadata = {"chatbot": "Mosscap"}
+
+
+async def chat() -> bool:
+    try:
+        user_input = input("User:> ")
+    except KeyboardInterrupt:
+        print("\n\nExiting chat...")
+        return False
+    except EOFError:
+        print("\n\nExiting chat...")
+        return False
+
+    if user_input == "exit":
+        print("\n\nExiting chat...")
+        return False
+
+    # Add the user message to the chat history so that the chatbot can respond to it.
+    chat_history.add_user_message(user_input)
+
+    # Get the chat message content from the chat completion service.
+    response = await chat_completion_service.get_chat_message_content(
+        chat_history=chat_history,
+        settings=request_settings,
+    )
+    if response:
+        print(f"Mosscap:> {response}")
+
+        # Add the chat message to the chat history to keep track of the conversation.
+        chat_history.add_message(response)
+
+    return True
+
+
+async def main() -> None:
+    # Start the chat loop. The chat loop will continue until the user types "exit".
+    chatting = True
+    while chatting:
+        chatting = await chat()
+
+    # Sample output:
+    # User:> Who has the most career points in NBA history?
+    # Mosscap:> As of October 2023, the all-time leader in total regular-season scoring in the history of the National
+    #    Basketball Association (N.B.A.) is Kareem Abdul-Jabbar, who scored 38,387 total regular-season points
+    #    during his illustrious 20-year playing career.
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
diff --git a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_prompt_execution_settings.py b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_prompt_execution_settings.py
index 1ff6c993ea24..12451d35296f 100644
--- a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_prompt_execution_settings.py
+++ b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_prompt_execution_settings.py
@@ -33,6 +33,8 @@ class OpenAIPromptExecutionSettings(PromptExecutionSettings):
     temperature: Annotated[float | None, Field(ge=0.0, le=2.0)] = None
     top_p: Annotated[float | None, Field(ge=0.0, le=1.0)] = None
     user: str | None = None
+    store: bool | None = None
+    metadata: dict[str, str] | None = None
 
 
 class OpenAITextPromptExecutionSettings(OpenAIPromptExecutionSettings):
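
For context, a minimal sketch (not part of the patch) of how the new `store` and `metadata` fields could be set when constructing the execution settings directly, instead of mutating `request_settings` after the fact as the sample does. The `OpenAIChatPromptExecutionSettings` import path below is an assumption based on the package's usual public exports.

```python
# Minimal sketch, not part of the PR: construct the settings with the new fields.
# Assumes OpenAIChatPromptExecutionSettings (a subclass of OpenAIPromptExecutionSettings)
# is re-exported from semantic_kernel.connectors.ai.open_ai.
from semantic_kernel.connectors.ai.open_ai import OpenAIChatPromptExecutionSettings

request_settings = OpenAIChatPromptExecutionSettings(
    store=True,  # ask OpenAI to retain the completion for distillation/evals
    metadata={"chatbot": "Mosscap"},  # string key/value tags attached to the stored completion
)
```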