From df11762182c26ec92c62b2594ce792c98e6d201e Mon Sep 17 00:00:00 2001
From: alexwolfmsft <93200798+alexwolfmsft@users.noreply.github.com>
Date: Fri, 20 Dec 2024 11:37:03 -0500
Subject: [PATCH] Ai freshness pass (#44038)
* Freshness pass
---------
Co-authored-by: Genevieve Warren <24882762+gewarren@users.noreply.github.com>
---
docs/ai/azure-ai-for-dotnet-developers.md | 2 +-
docs/ai/conceptual/embeddings.md | 4 +-
.../understanding-openai-functions.md | 2 +-
docs/ai/conceptual/understanding-tokens.md | 10 ++---
...-chat-scaling-with-azure-container-apps.md | 2 +-
docs/ai/get-started-app-chat-template.md | 2 +-
docs/ai/how-to/content-filtering.md | 28 ++++--------
.../AIContentFiltering.csproj | 6 ++-
.../snippets/content-filtering/Program.cs | 45 ++++++-------------
docs/ai/index.yml | 2 +-
docs/ai/quickstarts/quickstart-local-ai.md | 2 +-
11 files changed, 37 insertions(+), 68 deletions(-)
diff --git a/docs/ai/azure-ai-for-dotnet-developers.md b/docs/ai/azure-ai-for-dotnet-developers.md
index b99a041519c47..e19c28f8cff03 100644
--- a/docs/ai/azure-ai-for-dotnet-developers.md
+++ b/docs/ai/azure-ai-for-dotnet-developers.md
@@ -1,7 +1,7 @@
---
title: Develop .NET apps that use Azure AI services
description: This article provides an organized list of resources about Azure AI scenarios for .NET developers, including documentation and code samples.
-ms.date: 05/17/2024
+ms.date: 12/19/2024
ms.topic: overview
ms.custom: devx-track-dotnet, devx-track-dotnet-ai
---
diff --git a/docs/ai/conceptual/embeddings.md b/docs/ai/conceptual/embeddings.md
index bfee6673f3284..d041255d56bb5 100644
--- a/docs/ai/conceptual/embeddings.md
+++ b/docs/ai/conceptual/embeddings.md
@@ -3,7 +3,7 @@ title: "How Embeddings Extend Your AI Model's Reach"
description: "Learn how embeddings extend the limits and capabilities of AI models in .NET."
author: catbutler
ms.topic: concept-article #Don't change.
-ms.date: 05/14/2024
+ms.date: 12/19/2024
#customer intent: As a .NET developer, I want to understand how embeddings extend LLM limits and capabilities in .NET so that I have more semantic context and better outcomes for my AI apps.
@@ -34,7 +34,7 @@ Use embeddings to help a model understand the meaning and context of text, and t
Use audio embeddings to process audio files or inputs in your app.
-For example, [Speech service](/azure/ai-services/speech-service/) supports a range of audio embeddings, including [speech to text](/azure/ai-services/speech-service/speech-to-text) and [text to speech](/azure/ai-services/speech-service/text-to-speech). You can process audio in real-time or in batches.
+For example, [Azure AI Speech](/azure/ai-services/speech-service/) supports a range of audio embeddings, including [speech to text](/azure/ai-services/speech-service/speech-to-text) and [text to speech](/azure/ai-services/speech-service/text-to-speech). You can process audio in real time or in batches.
### Turn text into images or images into text
diff --git a/docs/ai/conceptual/understanding-openai-functions.md b/docs/ai/conceptual/understanding-openai-functions.md
index 0f125eac1a7ec..e5d90460e86f2 100644
--- a/docs/ai/conceptual/understanding-openai-functions.md
+++ b/docs/ai/conceptual/understanding-openai-functions.md
@@ -3,7 +3,7 @@ title: "Understanding OpenAI Function Calling"
description: "Understand how function calling enables you to integrate external tools with your OpenAI application."
author: haywoodsloan
ms.topic: concept-article
-ms.date: 05/14/2024
+ms.date: 12/19/2024
#customer intent: As a .NET developer, I want to understand OpenAI function calling so that I can integrate external tools with AI completions in my .NET project.
diff --git a/docs/ai/conceptual/understanding-tokens.md b/docs/ai/conceptual/understanding-tokens.md
index 910d5ccd93206..bd804c5971403 100644
--- a/docs/ai/conceptual/understanding-tokens.md
+++ b/docs/ai/conceptual/understanding-tokens.md
@@ -3,7 +3,7 @@ title: "Understanding tokens"
description: "Understand how large language models (LLMs) use tokens to analyze semantic relationships and generate natural language outputs"
author: haywoodsloan
ms.topic: concept-article
-ms.date: 05/14/2024
+ms.date: 12/19/2024
#customer intent: As a .NET developer, I want to understand how large language models (LLMs) use tokens so I can add semantic analysis and text generation capabilities to my .NET projects.
@@ -11,7 +11,7 @@ ms.date: 05/14/2024
# Understand tokens
-Tokens are words, character sets, or combinations of words and punctuation that are used by large language models (LLMs) to decompose text into. Tokenization is the first step in training. The LLM analyzes the semantic relationships between tokens, such as how commonly they're used together or whether they're used in similar contexts. After training, the LLM uses those patterns and relationships to generate a sequence of output tokens based on the input sequence.
+Tokens are words, character sets, or combinations of words and punctuation that are generated by large language models (LLMs) when they decompose text. Tokenization is the first step in training. The LLM analyzes the semantic relationships between tokens, such as how commonly they're used together or whether they're used in similar contexts. After training, the LLM uses those patterns and relationships to generate a sequence of output tokens based on the input sequence.
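To see how a given string breaks down, you can run a tokenizer locally. The following sketch uses the `Microsoft.ML.Tokenizers` NuGet package; the model name, sample sentence, and exact API shape are illustrative assumptions, so adjust them to the package version you reference:

```csharp
using Microsoft.ML.Tokenizers;

// Build a tokenizer that matches the encoding used by a GPT-4-class model.
// "gpt-4" is an illustrative model name.
Tokenizer tokenizer = TiktokenTokenizer.CreateForModel("gpt-4");

string text = "I heard a dog bark loudly at a cat";

// Count the tokens and inspect the individual token IDs.
int tokenCount = tokenizer.CountTokens(text);
var tokenIds = tokenizer.EncodeToIds(text);

Console.WriteLine($"Token count: {tokenCount}");
Console.WriteLine($"Token IDs: {string.Join(", ", tokenIds)}");
```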
## Turning text into tokens
@@ -89,11 +89,7 @@ Output generation is an iterative operation. The model appends the predicted tok
### Token limits
-LLMs have limitations regarding the maximum number of tokens that can be used as input or generated as output. This limitation often causes the input and output tokens to be combined into a maximum context window.
-
-For example, GPT-4 supports up to 8,192 tokens of context. The combined size of the input and output tokens can't exceed 8,192.
-
-Taken together, a model's token limit and tokenization method determine the maximum length of text that can be provided as input or generated as output.
+LLMs have limits on the maximum number of tokens that can be used as input or generated as output. This limit is often expressed as a maximum context window that the combined input and output tokens must fit within. Taken together, a model's token limit and tokenization method determine the maximum length of text that can be provided as input or generated as output.
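In practical terms, an app has to budget tokens so that the prompt and the expected output together fit inside the context window. The following sketch shows that arithmetic; the window size, reserved output count, and prompt length are illustrative values rather than limits of any specific model:

```csharp
// Illustrative numbers only; check your model's documentation for its real limits.
const int contextWindow = 8_192;     // maximum combined input + output tokens
const int reservedForOutput = 1_000; // tokens to keep available for the completion

int promptTokens = 7_500;            // for example, measured with a tokenizer

bool fits = promptTokens + reservedForOutput <= contextWindow;
Console.WriteLine(fits
    ? "The prompt fits within the context window."
    : $"The prompt is too long; trim about {promptTokens + reservedForOutput - contextWindow} tokens.");
```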
For example, consider a model that has a maximum context window of 100 tokens. The model processes our example sentences as input text:
diff --git a/docs/ai/get-started-app-chat-scaling-with-azure-container-apps.md b/docs/ai/get-started-app-chat-scaling-with-azure-container-apps.md
index 02551582304ee..312fab8f0d17d 100644
--- a/docs/ai/get-started-app-chat-scaling-with-azure-container-apps.md
+++ b/docs/ai/get-started-app-chat-scaling-with-azure-container-apps.md
@@ -1,7 +1,7 @@
---
title: Scale Azure OpenAI for .NET chat sample using RAG
description: Learn how to add load balancing to your application to extend the chat app beyond the Azure OpenAI token and model quota limits.
-ms.date: 05/16/2024
+ms.date: 12/19/2024
ms.topic: get-started
ms.custom: devx-track-dotnet, devx-track-dotnet-ai
# CustomerIntent: As a .NET developer new to Azure OpenAI, I want to scale my Azure OpenAI capacity to avoid rate limit errors with Azure Container Apps.
diff --git a/docs/ai/get-started-app-chat-template.md b/docs/ai/get-started-app-chat-template.md
index c8c90816af53c..103d4979a36f5 100644
--- a/docs/ai/get-started-app-chat-template.md
+++ b/docs/ai/get-started-app-chat-template.md
@@ -1,7 +1,7 @@
---
title: Get started with the chat using your own data sample for .NET
description: Get started with .NET and search across your own data using a chat app sample implemented using Azure OpenAI Service and Retrieval Augmented Generation (RAG) in Azure AI Search. Easily deploy with Azure Developer CLI. This article uses the Azure AI Reference Template sample.
-ms.date: 05/16/2024
+ms.date: 12/19/2024
ms.topic: get-started
ms.custom: devx-track-dotnet, devx-track-dotnet-ai
# CustomerIntent: As a .NET developer new to Azure OpenAI, I want to deploy and use sample code to interact with an app infused with my own business data so that I can learn from the sample code.
diff --git a/docs/ai/how-to/content-filtering.md b/docs/ai/how-to/content-filtering.md
index 1a4b6e013646e..e3227c8a77b04 100644
--- a/docs/ai/how-to/content-filtering.md
+++ b/docs/ai/how-to/content-filtering.md
@@ -5,13 +5,13 @@ ms.custom: devx-track-dotnet, devx-track-dotnet-ai
author: alexwolfmsft
ms.author: alexwolf
ms.topic: how-to
-ms.date: 05/13/2024
+ms.date: 12/19/2024
#customer intent: As a .NET developer, I want to manage OpenAI Content Filtering in a .NET app
---
-# Work with OpenAI content filtering in a .NET app
+# Work with Azure OpenAI content filtering in a .NET app
This article demonstrates how to handle content filtering concerns in a .NET app. Azure OpenAI Service includes a content filtering system that works alongside core models. This system works by running both the prompt and completion through an ensemble of classification models aimed at detecting and preventing the output of harmful content. The content filtering system detects and takes action on specific categories of potentially harmful content in both input prompts and output completions. Variations in API configurations and application design might affect completions and thus filtering behavior.
@@ -27,7 +27,7 @@ The [Content Filtering](/azure/ai-services/openai/concepts/content-filter) docum
To use the sample code in this article, you need to create and assign a content filter to your OpenAI model.
-1. [Create and assign a content filter](/azure/ai-services/openai/how-to/content-filters) to your provisioned GPT-35 or GPT-4 model.
+1. [Create and assign a content filter](/azure/ai-services/openai/how-to/content-filters) to your provisioned model.
1. Add the [`Azure.AI.OpenAI`](https://www.nuget.org/packages/Azure.AI.OpenAI) NuGet package to your project.
@@ -35,25 +35,15 @@ To use the sample code in this article, you need to create and assign a content
dotnet add package Azure.AI.OpenAI
```
-1. Create a simple chat completion flow in your .NET app using the `OpenAiClient`. Replace the `YOUR_OPENAI_ENDPOINT`, `YOUR_OPENAI_KEY`, and `YOUR_OPENAI_DEPLOYMENT` values with your own.
+1. Create a simple chat completion flow in your .NET app using the `AzureOpenAIClient`. Replace the `YOUR_MODEL_ENDPOINT` and `YOUR_MODEL_DEPLOYMENT_NAME` values with your own.
- :::code language="csharp" source="./snippets/content-filtering/program.cs" id="chatCompletionFlow":::
+ :::code language="csharp" source="./snippets/content-filtering/program.cs" :::
-1. Print out the content filtering results for each category.
+1. Replace the `YOUR_PROMPT` placeholder with your own message and run the app to experiment with content filtering results. If you enter a prompt that the content filter considers unsafe, Azure OpenAI returns a `400 Bad Request` error and the app prints a message to the console similar to the following (one way to detect this case in code is sketched after the example output):
- :::code language="csharp" source="./snippets/content-filtering/program.cs" id="printContentFilteringResult":::
-
-1. Replace the `YOUR_PROMPT` placeholder with your own message and run the app to experiment with content filtering results. The following output shows an example of a prompt that triggers a low severity content filtering result:
-
- ```output
- I am sorry if I have done anything to upset you.
- Is there anything I can do to assist you and make things better?
-
- Hate category is filtered: False with low severity.
- SelfHarm category is filtered: False with safe severity.
- Sexual category is filtered: False with safe severity.
- Violence category is filtered: False with low severity.
- ```
+```output
+The response was filtered due to the prompt triggering Azure OpenAI's content management policy...
+```
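If you want to distinguish a content filter rejection from other request failures, one option is to inspect the exception message before printing it. The following is only a sketch that reuses the `client` object from the previous snippet and assumes the error text includes a `content_filter` indicator; verify the exact exception type and message against your own output:

```csharp
try
{
    ChatCompletion completion = await client.CompleteAsync("YOUR_PROMPT");
    Console.WriteLine(completion.Message);
}
catch (Exception e) when (e.Message.Contains("content_filter", StringComparison.OrdinalIgnoreCase))
{
    // The service rejected the request because the content filter flagged the prompt or completion.
    Console.WriteLine("The content filter blocked this request. Adjust the prompt and try again.");
}
catch (Exception e)
{
    // Any other failure: authentication, networking, quota, and so on.
    Console.WriteLine($"Request failed: {e.Message}");
}
```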
## Related content
diff --git a/docs/ai/how-to/snippets/content-filtering/AIContentFiltering.csproj b/docs/ai/how-to/snippets/content-filtering/AIContentFiltering.csproj
index b5ff6e91f980e..bd79a4df023fa 100644
--- a/docs/ai/how-to/snippets/content-filtering/AIContentFiltering.csproj
+++ b/docs/ai/how-to/snippets/content-filtering/AIContentFiltering.csproj
@@ -8,8 +8,10 @@
-
-
+
+
+
+
diff --git a/docs/ai/how-to/snippets/content-filtering/Program.cs b/docs/ai/how-to/snippets/content-filtering/Program.cs
index 514240c01e443..f6537d51e3969 100644
--- a/docs/ai/how-to/snippets/content-filtering/Program.cs
+++ b/docs/ai/how-to/snippets/content-filtering/Program.cs
@@ -1,38 +1,19 @@
-//
-using Azure;
-using Azure.AI.OpenAI;
+using Azure.AI.OpenAI;
+using Azure.Identity;
+using Microsoft.Extensions.AI;
-string endpoint = "YOUR_OPENAI_ENDPOINT";
-string key = "YOUR_OPENAI_KEY";
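+// Create an IChatClient for an Azure OpenAI deployment, authenticating with DefaultAzureCredential.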
+IChatClient client =
+ new AzureOpenAIClient(
+ new Uri("YOUR_MODEL_ENDPOINT"),
+ new DefaultAzureCredential()).AsChatClient("YOUR_MODEL_DEPLOYMENT_NAME");
-OpenAIClient client = new(new Uri(endpoint), new AzureKeyCredential(key));
-
-var chatCompletionsOptions = new ChatCompletionsOptions()
+try
{
- DeploymentName = "YOUR_DEPLOYMENT_NAME",
- Messages =
- {
- new ChatRequestSystemMessage("You are a helpful assistant."),
- new ChatRequestUserMessage("YOUR_PROMPT")
- }
-};
-
-Response response = client.GetChatCompletions(chatCompletionsOptions);
-Console.WriteLine(response.Value.Choices[0].Message.Content);
-Console.WriteLine();
-//
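+    // Send the prompt to the model; a prompt blocked by the content filter surfaces here as an exception.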
+ ChatCompletion completion = await client.CompleteAsync("YOUR_PROMPT");
-//
-foreach (var promptFilterResult in response.Value.PromptFilterResults)
+ Console.WriteLine(completion.Message);
+}
+catch (Exception e)
{
- var results = promptFilterResult.ContentFilterResults;
- Console.WriteLine(@$"Hate category is filtered:
- {results.Hate.Filtered} with {results.Hate.Severity} severity.");
- Console.WriteLine(@$"Self-harm category is filtered:
- {results.SelfHarm.Filtered} with {results.SelfHarm.Severity} severity.");
- Console.WriteLine(@$"Sexual category is filtered:
- {results.Sexual.Filtered} with {results.Sexual.Severity} severity.");
- Console.WriteLine(@$"Violence category is filtered:
- {results.Violence.Filtered} with {results.Violence.Severity} severity.");
+ Console.WriteLine(e.Message);
}
-//
\ No newline at end of file
diff --git a/docs/ai/index.yml b/docs/ai/index.yml
index af00f5e0e6880..dc4e838909e12 100644
--- a/docs/ai/index.yml
+++ b/docs/ai/index.yml
@@ -8,7 +8,7 @@ metadata:
description: Samples, tutorials, and education for using AI with .NET
ms.topic: hub-page
ms.service: dotnet
- ms.date: 05/13/2024
+ ms.date: 12/19/2024
author: alexwolfmsft
ms.author: alexwolf
diff --git a/docs/ai/quickstarts/quickstart-local-ai.md b/docs/ai/quickstarts/quickstart-local-ai.md
index 41abf32ca9c0c..d8abcfe1562fe 100644
--- a/docs/ai/quickstarts/quickstart-local-ai.md
+++ b/docs/ai/quickstarts/quickstart-local-ai.md
@@ -1,7 +1,7 @@
---
title: Quickstart - Connect to and chat with a local AI using .NET
description: Set up a local AI model and chat with it using a .NET console app and the Microsoft.Extensions.AI libraries
-ms.date: 11/24/2024
+ms.date: 12/19/2024
ms.topic: quickstart
ms.custom: devx-track-dotnet, devx-track-dotnet-ai
author: alexwolfmsft