diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props index e93dc3df49a2..252a4498bcd4 100644 --- a/dotnet/Directory.Packages.props +++ b/dotnet/Directory.Packages.props @@ -6,36 +6,37 @@ - + + + + + + + + - - - - - - - - - - - - - + + + + + + + @@ -44,7 +45,11 @@ - + + + + + @@ -57,7 +62,6 @@ - @@ -67,6 +71,7 @@ + diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln index 0a711f84f5f3..76bb693a61b2 100644 --- a/dotnet/SK-dotnet.sln +++ b/dotnet/SK-dotnet.sln @@ -439,6 +439,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "sk-chatgpt-azure-function", EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "kernel-functions-generator", "samples\Demos\CreateChatGptPlugin\MathPlugin\kernel-functions-generator\kernel-functions-generator.csproj", "{78785CB1-66CF-4895-D7E5-A440DD84BE86}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Agents.AzureAI", "src\Agents\AzureAI\Agents.AzureAI.csproj", "{EA35F1B5-9148-4189-BE34-5E00AED56D65}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -1076,12 +1078,6 @@ Global {6F591D05-5F7F-4211-9042-42D8BCE60415}.Publish|Any CPU.Build.0 = Debug|Any CPU {6F591D05-5F7F-4211-9042-42D8BCE60415}.Release|Any CPU.ActiveCfg = Release|Any CPU {6F591D05-5F7F-4211-9042-42D8BCE60415}.Release|Any CPU.Build.0 = Release|Any CPU - {232E1153-6366-4175-A982-D66B30AAD610}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {232E1153-6366-4175-A982-D66B30AAD610}.Debug|Any CPU.Build.0 = Debug|Any CPU - {232E1153-6366-4175-A982-D66B30AAD610}.Publish|Any CPU.ActiveCfg = Debug|Any CPU - {232E1153-6366-4175-A982-D66B30AAD610}.Publish|Any CPU.Build.0 = Debug|Any CPU - {232E1153-6366-4175-A982-D66B30AAD610}.Release|Any CPU.ActiveCfg = Release|Any CPU - {232E1153-6366-4175-A982-D66B30AAD610}.Release|Any CPU.Build.0 = Release|Any CPU {E82B640C-1704-430D-8D71-FD8ED3695468}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 
{E82B640C-1704-430D-8D71-FD8ED3695468}.Debug|Any CPU.Build.0 = Debug|Any CPU {E82B640C-1704-430D-8D71-FD8ED3695468}.Publish|Any CPU.ActiveCfg = Debug|Any CPU @@ -1100,6 +1096,12 @@ Global {39EAB599-742F-417D-AF80-95F90376BB18}.Publish|Any CPU.Build.0 = Publish|Any CPU {39EAB599-742F-417D-AF80-95F90376BB18}.Release|Any CPU.ActiveCfg = Release|Any CPU {39EAB599-742F-417D-AF80-95F90376BB18}.Release|Any CPU.Build.0 = Release|Any CPU + {232E1153-6366-4175-A982-D66B30AAD610}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {232E1153-6366-4175-A982-D66B30AAD610}.Debug|Any CPU.Build.0 = Debug|Any CPU + {232E1153-6366-4175-A982-D66B30AAD610}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {232E1153-6366-4175-A982-D66B30AAD610}.Publish|Any CPU.Build.0 = Debug|Any CPU + {232E1153-6366-4175-A982-D66B30AAD610}.Release|Any CPU.ActiveCfg = Release|Any CPU + {232E1153-6366-4175-A982-D66B30AAD610}.Release|Any CPU.Build.0 = Release|Any CPU {DAC54048-A39A-4739-8307-EA5A291F2EA0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {DAC54048-A39A-4739-8307-EA5A291F2EA0}.Debug|Any CPU.Build.0 = Debug|Any CPU {DAC54048-A39A-4739-8307-EA5A291F2EA0}.Publish|Any CPU.ActiveCfg = Debug|Any CPU @@ -1172,6 +1174,12 @@ Global {78785CB1-66CF-4895-D7E5-A440DD84BE86}.Publish|Any CPU.Build.0 = Debug|Any CPU {78785CB1-66CF-4895-D7E5-A440DD84BE86}.Release|Any CPU.ActiveCfg = Release|Any CPU {78785CB1-66CF-4895-D7E5-A440DD84BE86}.Release|Any CPU.Build.0 = Release|Any CPU + {EA35F1B5-9148-4189-BE34-5E00AED56D65}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {EA35F1B5-9148-4189-BE34-5E00AED56D65}.Debug|Any CPU.Build.0 = Debug|Any CPU + {EA35F1B5-9148-4189-BE34-5E00AED56D65}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {EA35F1B5-9148-4189-BE34-5E00AED56D65}.Publish|Any CPU.Build.0 = Publish|Any CPU + {EA35F1B5-9148-4189-BE34-5E00AED56D65}.Release|Any CPU.ActiveCfg = Release|Any CPU + {EA35F1B5-9148-4189-BE34-5E00AED56D65}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution 
HideSolutionNode = FALSE @@ -1333,6 +1341,7 @@ Global {02EA681E-C7D8-13C7-8484-4AC65E1B71E8} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263} {2EB6E4C2-606D-B638-2E08-49EA2061C428} = {02EA681E-C7D8-13C7-8484-4AC65E1B71E8} {78785CB1-66CF-4895-D7E5-A440DD84BE86} = {02EA681E-C7D8-13C7-8484-4AC65E1B71E8} + {EA35F1B5-9148-4189-BE34-5E00AED56D65} = {6823CD5E-2ABE-41EB-B865-F86EC13F0CF9} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {FBDC56A3-86AD-4323-AA0F-201E59123B83} diff --git a/dotnet/samples/Concepts/Agents/AzureAIAgent_FileManipulation.cs b/dotnet/samples/Concepts/Agents/AzureAIAgent_FileManipulation.cs new file mode 100644 index 000000000000..9e4fc425801f --- /dev/null +++ b/dotnet/samples/Concepts/Agents/AzureAIAgent_FileManipulation.cs @@ -0,0 +1,109 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.Diagnostics; +using Azure.AI.Projects; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.AzureAI; +using Microsoft.SemanticKernel.ChatCompletion; +using Resources; +using Agent = Azure.AI.Projects.Agent; + +namespace Agents; + +/// +/// Demonstrate using code-interpreter to manipulate and generate csv files with . 
+/// +public class AzureAIAgent_FileManipulation(ITestOutputHelper output) : BaseAgentsTest(output) +{ + [Fact] + public async Task AnalyzeCSVFileUsingAzureAIAgentAsync() + { + AzureAIClientProvider clientProvider = this.GetAzureProvider(); + AgentsClient client = clientProvider.Client.GetAgentsClient(); + + await using Stream stream = EmbeddedResource.ReadStream("sales.csv")!; + AgentFile fileInfo = await client.UploadFileAsync(stream, AgentFilePurpose.Agents, "sales.csv"); + + // Define the agent + Agent definition = await client.CreateAgentAsync( + TestConfiguration.AzureAI.ChatModelId, + tools: [new CodeInterpreterToolDefinition()], + toolResources: + new() + { + CodeInterpreter = new() + { + FileIds = { fileInfo.Id }, + } + }); + AzureAIAgent agent = new(definition, clientProvider); + + // Create a chat for agent interaction. + AgentGroupChat chat = new(); + + // Respond to user input + try + { + await InvokeAgentAsync("Which segment had the most sales?"); + await InvokeAgentAsync("List the top 5 countries that generated the most profit."); + await InvokeAgentAsync("Create a tab delimited file report of profit by each country per month."); + } + finally + { + await client.DeleteAgentAsync(agent.Id); + await client.DeleteFileAsync(fileInfo.Id); + } + + // Local function to invoke agent and display the conversation messages. 
+ async Task InvokeAgentAsync(string input) + { + ChatMessageContent message = new(AuthorRole.User, input); + chat.AddChatMessage(new(AuthorRole.User, input)); + this.WriteAgentChatMessage(message); + + await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) + { + this.WriteAgentChatMessage(response); + await this.DownloadContentAsync(client, response); + } + } + } + + private async Task DownloadContentAsync(AgentsClient client, ChatMessageContent message) + { + foreach (KernelContent item in message.Items) + { + if (item is AnnotationContent annotation) + { + await this.DownloadFileAsync(client, annotation.FileId!); + } + } + } + + private async Task DownloadFileAsync(AgentsClient client, string fileId, bool launchViewer = false) + { + AgentFile fileInfo = client.GetFile(fileId); + if (fileInfo.Purpose == AgentFilePurpose.AgentsOutput) + { + string filePath = Path.Combine(Path.GetTempPath(), Path.GetFileName(fileInfo.Filename)); + if (launchViewer) + { + filePath = Path.ChangeExtension(filePath, ".png"); + } + + BinaryData content = await client.GetFileContentAsync(fileId); + File.WriteAllBytes(filePath, content.ToArray()); + Console.WriteLine($" File #{fileId} saved to: {filePath}"); + + if (launchViewer) + { + Process.Start( + new ProcessStartInfo + { + FileName = "cmd.exe", + Arguments = $"/C start {filePath}" + }); + } + } + } +} diff --git a/dotnet/samples/Concepts/Agents/AzureAIAgent_Streaming.cs b/dotnet/samples/Concepts/Agents/AzureAIAgent_Streaming.cs new file mode 100644 index 000000000000..731c4b466ae8 --- /dev/null +++ b/dotnet/samples/Concepts/Agents/AzureAIAgent_Streaming.cs @@ -0,0 +1,189 @@ +// Copyright (c) Microsoft. All rights reserved. 
+using System.ComponentModel; +using Azure.AI.Projects; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.AzureAI; +using Microsoft.SemanticKernel.ChatCompletion; +using Agent = Azure.AI.Projects.Agent; + +namespace Agents; + +/// +/// Demonstrate consuming "streaming" message for . +/// +public class AzureAIAgent_Streaming(ITestOutputHelper output) : BaseAgentsTest(output) +{ + [Fact] + public async Task UseStreamingAgentAsync() + { + const string AgentName = "Parrot"; + const string AgentInstructions = "Repeat the user message in the voice of a pirate and then end with a parrot sound."; + + // Define the agent + AzureAIClientProvider clientProvider = this.GetAzureProvider(); + AgentsClient client = clientProvider.Client.GetAgentsClient(); + Agent definition = await client.CreateAgentAsync( + TestConfiguration.AzureAI.ChatModelId, + AgentName, + null, + AgentInstructions); + AzureAIAgent agent = new(definition, clientProvider); + + // Create a thread for the agent conversation. 
+ AgentThread thread = await client.CreateThreadAsync(metadata: AssistantSampleMetadata); + + // Respond to user input + await InvokeAgentAsync(agent, thread.Id, "Fortune favors the bold."); + await InvokeAgentAsync(agent, thread.Id, "I came, I saw, I conquered."); + await InvokeAgentAsync(agent, thread.Id, "Practice makes perfect."); + + // Output the entire chat history + await DisplayChatHistoryAsync(agent, thread.Id); + } + + [Fact] + public async Task UseStreamingAssistantAgentWithPluginAsync() + { + const string AgentName = "Host"; + const string AgentInstructions = "Answer questions about the menu."; + + // Define the agent + AzureAIClientProvider clientProvider = this.GetAzureProvider(); + AgentsClient client = clientProvider.Client.GetAgentsClient(); + Agent definition = await client.CreateAgentAsync( + TestConfiguration.AzureAI.ChatModelId, + AgentName, + null, + AgentInstructions); + AzureAIAgent agent = new(definition, clientProvider) + { + Kernel = new Kernel(), + }; + + // Initialize plugin and add to the agent's Kernel (same as direct Kernel usage). + KernelPlugin plugin = KernelPluginFactory.CreateFromType(); + agent.Kernel.Plugins.Add(plugin); + + // Create a thread for the agent conversation. 
+ AgentThread thread = await client.CreateThreadAsync(metadata: AssistantSampleMetadata); + + // Respond to user input + await InvokeAgentAsync(agent, thread.Id, "What is the special soup and its price?"); + await InvokeAgentAsync(agent, thread.Id, "What is the special drink and its price?"); + + // Output the entire chat history + await DisplayChatHistoryAsync(agent, thread.Id); + } + + [Fact] + public async Task UseStreamingAssistantWithCodeInterpreterAsync() + { + const string AgentName = "MathGuy"; + const string AgentInstructions = "Solve math problems with code."; + + // Define the agent + AzureAIClientProvider clientProvider = this.GetAzureProvider(); + AgentsClient client = clientProvider.Client.GetAgentsClient(); + Agent definition = await client.CreateAgentAsync( + TestConfiguration.AzureAI.ChatModelId, + AgentName, + null, + AgentInstructions, + [new CodeInterpreterToolDefinition()]); + AzureAIAgent agent = new(definition, clientProvider) + { + Kernel = new Kernel(), + }; + + // Create a thread for the agent conversation. + AgentThread thread = await client.CreateThreadAsync(metadata: AssistantSampleMetadata); + + // Respond to user input + await InvokeAgentAsync(agent, thread.Id, "Is 191 a prime number?"); + await InvokeAgentAsync(agent, thread.Id, "Determine the values in the Fibonacci sequence that that are less then the value of 101"); + + // Output the entire chat history + await DisplayChatHistoryAsync(agent, thread.Id); + } + + // Local function to invoke agent and display the conversation messages. 
+ private async Task InvokeAgentAsync(AzureAIAgent agent, string threadId, string input) + { + ChatMessageContent message = new(AuthorRole.User, input); + await agent.AddChatMessageAsync(threadId, message); + this.WriteAgentChatMessage(message); + + ChatHistory history = []; + + bool isFirst = false; + bool isCode = false; + await foreach (StreamingChatMessageContent response in agent.InvokeStreamingAsync(threadId, messages: history)) + { + if (string.IsNullOrEmpty(response.Content)) + { + StreamingFunctionCallUpdateContent? functionCall = response.Items.OfType().SingleOrDefault(); + if (functionCall != null) + { + Console.WriteLine($"\n# {response.Role} - {response.AuthorName ?? "*"}: FUNCTION CALL - {functionCall.Name}"); + } + + continue; + } + + // Differentiate between assistant and tool messages + if (isCode != (response.Metadata?.ContainsKey(AzureAIAgent.CodeInterpreterMetadataKey) ?? false)) + { + isFirst = false; + isCode = !isCode; + } + + if (!isFirst) + { + Console.WriteLine($"\n# {response.Role} - {response.AuthorName ?? 
"*"}:"); + isFirst = true; + } + + Console.WriteLine($"\t > streamed: '{response.Content}'"); + } + + foreach (ChatMessageContent content in history) + { + this.WriteAgentChatMessage(content); + } + } + + private async Task DisplayChatHistoryAsync(AzureAIAgent agent, string threadId) + { + Console.WriteLine("================================"); + Console.WriteLine("CHAT HISTORY"); + Console.WriteLine("================================"); + + ChatMessageContent[] messages = await agent.GetThreadMessagesAsync(threadId).ToArrayAsync(); + for (int index = messages.Length - 1; index >= 0; --index) + { + this.WriteAgentChatMessage(messages[index]); + } + } + + public sealed class MenuPlugin + { + [KernelFunction, Description("Provides a list of specials from the menu.")] + [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")] + public string GetSpecials() + { + return @" +Special Soup: Clam Chowder +Special Salad: Cobb Salad +Special Drink: Chai Tea +"; + } + + [KernelFunction, Description("Provides the price of the requested menu item.")] + public string GetItemPrice( + [Description("The name of the menu item.")] + string menuItem) + { + return "$9.99"; + } + } +} diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FunctionFilters.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FunctionFilters.cs index 1381378a06c8..c3b3a851bc93 100644 --- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FunctionFilters.cs +++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FunctionFilters.cs @@ -15,8 +15,6 @@ namespace Agents; /// public class OpenAIAssistant_FunctionFilters(ITestOutputHelper output) : BaseAgentsTest(output) { - protected override bool ForceOpenAI => true; // %%% REMOVE - [Fact] public async Task UseFunctionInvocationFilterAsync() { diff --git a/dotnet/samples/Concepts/Concepts.csproj b/dotnet/samples/Concepts/Concepts.csproj index 746d5fbb73cf..ed5a4a0782c7 100644 --- 
a/dotnet/samples/Concepts/Concepts.csproj +++ b/dotnet/samples/Concepts/Concepts.csproj @@ -52,6 +52,7 @@ + diff --git a/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj b/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj index 3a061b4fb4a0..c6c490634d76 100644 --- a/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj +++ b/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj @@ -40,6 +40,7 @@ + diff --git a/dotnet/samples/GettingStartedWithAgents/README.md b/dotnet/samples/GettingStartedWithAgents/README.md index ed0e68802994..7c648d80fb70 100644 --- a/dotnet/samples/GettingStartedWithAgents/README.md +++ b/dotnet/samples/GettingStartedWithAgents/README.md @@ -86,12 +86,18 @@ To set your secrets with .NET Secret Manager: 5. Or Azure Open AI: ``` - dotnet user-secrets set "AzureOpenAI:DeploymentName" "..." - dotnet user-secrets set "AzureOpenAI:ChatDeploymentName" "..." + dotnet user-secrets set "AzureOpenAI:ChatDeploymentName" "gpt-4o" dotnet user-secrets set "AzureOpenAI:Endpoint" "https://... .openai.azure.com/" dotnet user-secrets set "AzureOpenAI:ApiKey" "..." ``` +6. Or Azure AI: + + ``` + dotnet user-secrets set "AzureAI:ConnectionString" "..." 
+ dotnet user-secrets set "AzureAI:ChatModelId" "gpt-4o" + ``` + > NOTE: Azure secrets will take precedence, if both Open AI and Azure Open AI secrets are defined, unless `ForceOpenAI` is set: ``` diff --git a/dotnet/samples/GettingStartedWithAgents/Resources/countries.json b/dotnet/samples/GettingStartedWithAgents/Resources/countries.json new file mode 100644 index 000000000000..b88d5040750a --- /dev/null +++ b/dotnet/samples/GettingStartedWithAgents/Resources/countries.json @@ -0,0 +1,46 @@ +{ + "openapi": "3.1.0", + "info": { + "title": "RestCountries.NET API", + "description": "Web API version 3.1 for managing country items, based on previous implementations from restcountries.eu and restcountries.com.", + "version": "v3.1" + }, + "servers": [ + { "url": "https://restcountries.net" } + ], + "auth": [], + "paths": { + "/v3.1/currency": { + "get": { + "description": "Search by currency.", + "operationId": "LookupCountryByCurrency", + "parameters": [ + { + "name": "currency", + "in": "query", + "description": "The currency to search for.", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Success", + "content": { + "text/plain": { + "schema": { + "type": "string" + } + } + } + } + } + } + } + }, + "components": { + "schemes": {} + } +} \ No newline at end of file diff --git a/dotnet/samples/GettingStartedWithAgents/Resources/weather.json b/dotnet/samples/GettingStartedWithAgents/Resources/weather.json new file mode 100644 index 000000000000..c3009f417de4 --- /dev/null +++ b/dotnet/samples/GettingStartedWithAgents/Resources/weather.json @@ -0,0 +1,62 @@ +{ + "openapi": "3.1.0", + "info": { + "title": "get weather data", + "description": "Retrieves current weather data for a location based on wttr.in.", + "version": "v1.0.0" + }, + "servers": [ + { + "url": "https://wttr.in" + } + ], + "auth": [], + "paths": { + "/{location}": { + "get": { + "description": "Get weather information for a specific 
location", + "operationId": "GetCurrentWeather", + "parameters": [ + { + "name": "location", + "in": "path", + "description": "City or location to retrieve the weather for", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "format", + "in": "query", + "description": "Always use j1 value for this parameter", + "required": true, + "schema": { + "type": "string", + "default": "j1" + } + } + ], + "responses": { + "200": { + "description": "Successful response", + "content": { + "text/plain": { + "schema": { + "type": "string" + } + } + } + }, + "404": { + "description": "Location not found" + } + }, + "deprecated": false + } + } + }, + "components": { + "schemes": {} + } +} \ No newline at end of file diff --git a/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs b/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs index 1e952810e51e..cadf16ce1407 100644 --- a/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs +++ b/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs @@ -14,6 +14,8 @@ namespace GettingStarted; /// public class Step08_Assistant(ITestOutputHelper output) : BaseAgentsTest(output) { + //protected override bool ForceOpenAI => true; + private const string HostName = "Host"; private const string HostInstructions = "Answer questions about the menu."; diff --git a/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs b/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs index 09b02d4ceebf..cd4de46f2b88 100644 --- a/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs +++ b/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs @@ -17,7 +17,7 @@ public class Step09_Assistant_Vision(ITestOutputHelper output) : BaseAgentsTest( protected override bool ForceOpenAI => true; [Fact] - public async Task UseSingleAssistantAgentAsync() + public async Task UseImageContentWithAssistantAsync() { // Define the agent OpenAIClientProvider provider = 
this.GetClientProvider(); diff --git a/dotnet/samples/GettingStartedWithAgents/Step12_Azure.cs b/dotnet/samples/GettingStartedWithAgents/Step12_Azure.cs new file mode 100644 index 000000000000..b144a066b7c5 --- /dev/null +++ b/dotnet/samples/GettingStartedWithAgents/Step12_Azure.cs @@ -0,0 +1,144 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.ComponentModel; +using Azure.AI.Projects; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.AzureAI; +using Microsoft.SemanticKernel.ChatCompletion; +using Resources; +using Agent = Azure.AI.Projects.Agent; + +namespace GettingStarted; + +/// +/// This example demonstrates similarity between using +/// and (see: Step 2). +/// +public class Step12_Azure(ITestOutputHelper output) : BaseAgentsTest(output) +{ + private const string HostName = "Host"; + private const string HostInstructions = "Answer questions about the menu."; + + [Fact] + public async Task UseSingleAssistantAgentAsync() + { + // Define the agent + AzureAIClientProvider clientProvider = this.GetAzureProvider(); + AgentsClient client = clientProvider.Client.GetAgentsClient(); + + Agent definition = await client.CreateAgentAsync( + TestConfiguration.AzureAI.ChatModelId, + HostName, + null, + HostInstructions); + AzureAIAgent agent = new(definition, clientProvider) + { + Kernel = new Kernel(), + }; + + // Initialize plugin and add to the agent's Kernel (same as direct Kernel usage). + KernelPlugin plugin = KernelPluginFactory.CreateFromType(); + agent.Kernel.Plugins.Add(plugin); + + // Create a thread for the agent conversation. 
+ AgentThread thread = await client.CreateThreadAsync(metadata: AssistantSampleMetadata); + + // Respond to user input + try + { + await InvokeAgentAsync("Hello"); + await InvokeAgentAsync("What is the special soup and its price?"); + await InvokeAgentAsync("What is the special drink and its price?"); + await InvokeAgentAsync("Thank you"); + } + finally + { + await client.DeleteThreadAsync(thread.Id); + await client.DeleteAgentAsync(agent.Id); + } + + // Local function to invoke agent and display the conversation messages. + async Task InvokeAgentAsync(string input) + { + ChatMessageContent message = new(AuthorRole.User, input); + await agent.AddChatMessageAsync(thread.Id, message); + this.WriteAgentChatMessage(message); + + await foreach (ChatMessageContent response in agent.InvokeAsync(thread.Id)) + { + this.WriteAgentChatMessage(response); + } + } + } + + [Fact] + public async Task UseTemplateForAssistantAgentAsync() + { + // Define the agent + string generateStoryYaml = EmbeddedResource.Read("GenerateStory.yaml"); + PromptTemplateConfig templateConfig = KernelFunctionYaml.ToPromptTemplateConfig(generateStoryYaml); + + AzureAIClientProvider clientProvider = this.GetAzureProvider(); + AgentsClient client = clientProvider.Client.GetAgentsClient(); + Agent definition = await client.CreateAgentAsync("gpt-4o", templateConfig.Name, templateConfig.Description, templateConfig.Template); + // Instructions, Name and Description properties defined via the config. + AzureAIAgent agent = new(definition, clientProvider, new KernelPromptTemplateFactory()) + { + Kernel = new Kernel(), + Arguments = new KernelArguments() + { + { "topic", "Dog" }, + { "length", "3" }, + }, + }; + + // Create a thread for the agent conversation. + AgentThread thread = await client.CreateThreadAsync(metadata: AssistantSampleMetadata); + + try + { + // Invoke the agent with the default arguments. + await InvokeAgentAsync(); + + // Invoke the agent with the override arguments. 
+ await InvokeAgentAsync( + new() + { + { "topic", "Cat" }, + { "length", "3" }, + }); + } + finally + { + await client.DeleteThreadAsync(thread.Id); + await client.DeleteAgentAsync(agent.Id); + } + + // Local function to invoke agent and display the response. + async Task InvokeAgentAsync(KernelArguments? arguments = null) + { + await foreach (ChatMessageContent response in agent.InvokeAsync(thread.Id, arguments)) + { + WriteAgentChatMessage(response); + } + } + } + + private sealed class MenuPlugin + { + [KernelFunction, Description("Provides a list of specials from the menu.")] + [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")] + public string GetSpecials() => + """ + Special Soup: Clam Chowder + Special Salad: Cobb Salad + Special Drink: Chai Tea + """; + + [KernelFunction, Description("Provides the price of the requested menu item.")] + public string GetItemPrice( + [Description("The name of the menu item.")] + string menuItem) => + "$9.99"; + } +} diff --git a/dotnet/samples/GettingStartedWithAgents/Step13_Azure_Chat.cs b/dotnet/samples/GettingStartedWithAgents/Step13_Azure_Chat.cs new file mode 100644 index 000000000000..fcc2598d1e68 --- /dev/null +++ b/dotnet/samples/GettingStartedWithAgents/Step13_Azure_Chat.cs @@ -0,0 +1,110 @@ +// Copyright (c) Microsoft. All rights reserved. +using Azure.AI.Projects; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.AzureAI; +using Microsoft.SemanticKernel.Agents.Chat; +using Microsoft.SemanticKernel.ChatCompletion; +using Agent = Azure.AI.Projects.Agent; + +namespace GettingStarted; + +/// +/// Demonstrate creation of with +/// that inform how chat proceeds with regards to: Agent selection, chat continuation, and maximum +/// number of agent interactions. 
+/// +public class Step13_Azure_Chat(ITestOutputHelper output) : BaseAgentsTest(output) +{ + private const string ReviewerName = "ArtDirector"; + private const string ReviewerInstructions = + """ + You are an art director who has opinions about copywriting born of a love for David Ogilvy. + The goal is to determine if the given copy is acceptable to print. + If so, state that it is approved. Do not use the word "approve" unless you are giving approval. + If not, provide insight on how to refine suggested copy without example. + """; + + private const string CopyWriterName = "CopyWriter"; + private const string CopyWriterInstructions = + """ + You are a copywriter with ten years of experience and are known for brevity and a dry humor. + The goal is to refine and decide on the single best copy as an expert in the field. + Only provide a single proposal per response. + You're laser focused on the goal at hand. + Don't waste time with chit chat. + Consider suggestions when refining an idea. + """; + + [Fact] + public async Task UseGroupChatWithTwoAgentsAsync() + { + // Define the agents + AzureAIClientProvider clientProvider = this.GetAzureProvider(); + AgentsClient client = clientProvider.Client.GetAgentsClient(); + Agent reviewerModel = await client.CreateAgentAsync( + TestConfiguration.AzureAI.ChatModelId, + ReviewerName, + null, + ReviewerInstructions); + AzureAIAgent agentReviewer = new(reviewerModel, clientProvider); + Agent writerModel = await client.CreateAgentAsync( + TestConfiguration.AzureAI.ChatModelId, + CopyWriterName, + null, + CopyWriterInstructions); + AzureAIAgent agentWriter = new(writerModel, clientProvider); + + // Create a chat for agent interaction. + AgentGroupChat chat = + new(agentWriter, agentReviewer) + { + ExecutionSettings = + new() + { + // Here a TerminationStrategy subclass is used that will terminate when + // an assistant message contains the term "approve". 
+ TerminationStrategy = + new ApprovalTerminationStrategy() + { + // Only the art-director may approve. + Agents = [agentReviewer], + // Limit total number of turns + MaximumIterations = 10, + } + } + }; + + try + { + // Invoke chat and display messages. + ChatMessageContent input = new(AuthorRole.User, "concept: maps made out of egg cartons."); + chat.AddChatMessage(input); + this.WriteAgentChatMessage(input); + + await foreach (ChatMessageContent response in chat.InvokeAsync()) + { + this.WriteAgentChatMessage(response); + } + + Console.WriteLine($"\n[IS COMPLETED: {chat.IsComplete}]"); + + var history = await chat.GetChatMessagesAsync(agentReviewer).Reverse().ToArrayAsync(); + foreach (ChatMessageContent response in history) + { + this.WriteAgentChatMessage(response); + } + } + finally + { + await chat.ResetAsync(); + } + } + + private sealed class ApprovalTerminationStrategy : TerminationStrategy + { + // Terminate when the final message contains the term "approve" + protected override Task ShouldAgentTerminateAsync(Microsoft.SemanticKernel.Agents.Agent agent, IReadOnlyList history, CancellationToken cancellationToken) + => Task.FromResult(history[history.Count - 1].Content?.Contains("approve", StringComparison.OrdinalIgnoreCase) ?? false); + } +} diff --git a/dotnet/samples/GettingStartedWithAgents/Step14_AzureTool_CodeInterpreter.cs b/dotnet/samples/GettingStartedWithAgents/Step14_AzureTool_CodeInterpreter.cs new file mode 100644 index 000000000000..5d02b21e4315 --- /dev/null +++ b/dotnet/samples/GettingStartedWithAgents/Step14_AzureTool_CodeInterpreter.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft. All rights reserved. +using Azure.AI.Projects; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.AzureAI; +using Microsoft.SemanticKernel.ChatCompletion; +using Agent = Azure.AI.Projects.Agent; + +namespace GettingStarted; + +/// +/// Demonstrate using code-interpreter on . 
+/// +public class Step14_AzureTool_CodeInterpreter(ITestOutputHelper output) : BaseAgentsTest(output) +{ + [Fact] + public async Task UseCodeInterpreterToolWithAgentAsync() + { + // Define the agent + AzureAIClientProvider clientProvider = this.GetAzureProvider(); + AgentsClient client = clientProvider.Client.GetAgentsClient(); + Agent definition = await client.CreateAgentAsync( + TestConfiguration.AzureAI.ChatModelId, + tools: [new CodeInterpreterToolDefinition()]); + AzureAIAgent agent = new(definition, clientProvider) + { + Kernel = new Kernel(), + }; + + // Create a thread for the agent conversation. + AgentThread thread = await client.CreateThreadAsync(metadata: AssistantSampleMetadata); + + // Respond to user input + try + { + await InvokeAgentAsync("Use code to determine the values in the Fibonacci sequence that that are less then the value of 101?"); + } + finally + { + await client.DeleteThreadAsync(thread.Id); + await client.DeleteAgentAsync(agent.Id); + } + + // Local function to invoke agent and display the conversation messages. + async Task InvokeAgentAsync(string input) + { + ChatMessageContent message = new(AuthorRole.User, input); + await agent.AddChatMessageAsync(thread.Id, message); + this.WriteAgentChatMessage(message); + + await foreach (ChatMessageContent response in agent.InvokeAsync(thread.Id)) + { + this.WriteAgentChatMessage(response); + } + } + } +} diff --git a/dotnet/samples/GettingStartedWithAgents/Step15_AzureTool_FileSearch.cs b/dotnet/samples/GettingStartedWithAgents/Step15_AzureTool_FileSearch.cs new file mode 100644 index 000000000000..04f5a6107611 --- /dev/null +++ b/dotnet/samples/GettingStartedWithAgents/Step15_AzureTool_FileSearch.cs @@ -0,0 +1,71 @@ +// Copyright (c) Microsoft. All rights reserved. 
+using Azure.AI.Projects; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.AzureAI; +using Microsoft.SemanticKernel.ChatCompletion; +using Resources; +using Agent = Azure.AI.Projects.Agent; + +namespace GettingStarted; + +/// +/// Demonstrate using file-search on . +/// +public class Step15_AzureTool_FileSearch(ITestOutputHelper output) : BaseAgentsTest(output) +{ + [Fact] + public async Task UseFileSearchToolWithAgentAsync() + { + // Define the agent + await using Stream stream = EmbeddedResource.ReadStream("employees.pdf")!; + + AzureAIClientProvider clientProvider = this.GetAzureProvider(); + AgentsClient client = clientProvider.Client.GetAgentsClient(); + AgentFile fileInfo = await client.UploadFileAsync(stream, AgentFilePurpose.Agents, "employees.pdf"); + VectorStore fileStore = await client.CreateVectorStoreAsync([fileInfo.Id], "step16-test"); + //await client.CreateVectorStoreFileAsync(fileStore.Id, fileInfo.Id); + //Metadata = { { AssistantSampleMetadataKey, bool.TrueString } } + Agent agentModel = await client.CreateAgentAsync( + TestConfiguration.AzureAI.ChatModelId, + tools: [new FileSearchToolDefinition()], + toolResources: new() + { + FileSearch = new() + { + VectorStoreIds = { fileStore.Id }, + } + }); + AzureAIAgent agent = new(agentModel, clientProvider); + + // Create a thread associated with the agent conversation. + AgentThread thread = await client.CreateThreadAsync(metadata: AssistantSampleMetadata); + + // Respond to user input + try + { + await InvokeAgentAsync("Who is the youngest employee?"); + await InvokeAgentAsync("Who works in sales?"); + await InvokeAgentAsync("I have a customer request, who can help me?"); + } + finally + { + await client.DeleteThreadAsync(thread.Id); + await client.DeleteAgentAsync(agent.Id); + await client.DeleteVectorStoreAsync(fileStore.Id); + await client.DeleteFileAsync(fileInfo.Id); + } + + // Local function to invoke agent and display the conversation messages. 
+ async Task InvokeAgentAsync(string input) + { + ChatMessageContent message = new(AuthorRole.User, input); + await agent.AddChatMessageAsync(thread.Id, message); + this.WriteAgentChatMessage(message); + + await foreach (ChatMessageContent response in agent.InvokeAsync(thread.Id)) + { + this.WriteAgentChatMessage(response); + } + } + } +} diff --git a/dotnet/samples/GettingStartedWithAgents/Step16_Azure_OpenAPI.cs b/dotnet/samples/GettingStartedWithAgents/Step16_Azure_OpenAPI.cs new file mode 100644 index 000000000000..511d90512826 --- /dev/null +++ b/dotnet/samples/GettingStartedWithAgents/Step16_Azure_OpenAPI.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft. All rights reserved. +using Azure.AI.Projects; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.AzureAI; +using Microsoft.SemanticKernel.ChatCompletion; +using Resources; +using Agent = Azure.AI.Projects.Agent; + +namespace GettingStarted; + +/// +/// This example demonstrates similarity between using +/// and (see: Step 2). +/// +/// +/// Note: Open API invocation does not involve kernel function calling or kernel filters. +/// Azure Function invocation is managed entirely by the Azure AI Agent service. 
+/// +public class Step16_Azure_OpenAPI(ITestOutputHelper output) : BaseAgentsTest(output) +{ + [Fact] + public async Task UseOpenAPIToolWithAgentAsync() + { + // Retrieve Open API specifications + string apiCountries = EmbeddedResource.Read("countries.json"); + string apiWeather = EmbeddedResource.Read("weather.json"); + + // Define the agent + AzureAIClientProvider clientProvider = this.GetAzureProvider(); + AgentsClient client = clientProvider.Client.GetAgentsClient(); + Agent definition = await client.CreateAgentAsync( + TestConfiguration.AzureAI.ChatModelId, + tools: + [ + new OpenApiToolDefinition("RestCountries", "Retrieve country information", BinaryData.FromString(apiCountries), new OpenApiAnonymousAuthDetails()), + new OpenApiToolDefinition("Weather", "Retrieve weather by location", BinaryData.FromString(apiWeather), new OpenApiAnonymousAuthDetails()) + ]); + AzureAIAgent agent = new(definition, clientProvider) + { + Kernel = new Kernel(), + }; + + // Create a thread for the agent conversation. + AgentThread thread = await client.CreateThreadAsync(metadata: AssistantSampleMetadata); + + // Respond to user input + try + { + await InvokeAgentAsync("What is the name and population of the country that uses currency with abbreviation THB"); + await InvokeAgentAsync("What is the weather in the capitol city of that country?"); + } + finally + { + await client.DeleteThreadAsync(thread.Id); + await client.DeleteAgentAsync(agent.Id); + } + + // Local function to invoke agent and display the conversation messages. 
+ async Task InvokeAgentAsync(string input) + { + ChatMessageContent message = new(AuthorRole.User, input); + await agent.AddChatMessageAsync(thread.Id, message); + this.WriteAgentChatMessage(message); + + await foreach (ChatMessageContent response in agent.InvokeAsync(thread.Id)) + { + this.WriteAgentChatMessage(response); + } + } + } +} diff --git a/dotnet/src/Agents/AzureAI/Agents.AzureAI.csproj b/dotnet/src/Agents/AzureAI/Agents.AzureAI.csproj new file mode 100644 index 000000000000..6ff0ac29ff9a --- /dev/null +++ b/dotnet/src/Agents/AzureAI/Agents.AzureAI.csproj @@ -0,0 +1,45 @@ + + + + + Microsoft.SemanticKernel.Agents.AzureAI + Microsoft.SemanticKernel.Agents.AzureAI + net8.0;netstandard2.0 + $(NoWarn);SKEXP0110 + false + alpha + + + + + + + Semantic Kernel Agents - AzureAI + Defines core a concrete Agent based on the Azure AI Agent API. + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dotnet/src/Agents/AzureAI/AzureAIAgent.cs b/dotnet/src/Agents/AzureAI/AzureAIAgent.cs new file mode 100644 index 000000000000..54ac5155e320 --- /dev/null +++ b/dotnet/src/Agents/AzureAI/AzureAIAgent.cs @@ -0,0 +1,258 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Azure.AI.Projects; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Agents.AzureAI.Internal; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace Microsoft.SemanticKernel.Agents.AzureAI; + +/// +/// A specialization based on Open AI Assistant / GPT. +/// +public sealed class AzureAIAgent : KernelAgent +{ + /// + /// Tool definitions used when associating a file attachment to an input message: + /// . + /// + public static class Tools + { + /// + /// The code-interpreter tool. + /// + public static readonly string CodeInterpreter = "code_interpreter"; + + /// + /// The file-search tool. 
+ /// + public const string FileSearch = "file_search"; + } + + /// + /// Metadata key that identifies code-interpreter content. + /// + public const string CodeInterpreterMetadataKey = "code"; + + private readonly AzureAIClientProvider _provider; + private readonly AgentsClient _client; + private readonly string[] _channelKeys; + + /// + /// The assistant definition. + /// + public Azure.AI.Projects.Agent Definition { get; private init; } + + /// + /// Defines polling behavior for run processing + /// + public RunPollingOptions PollingOptions { get; } = new(); + + /// + /// Adds a message to the specified thread. + /// + /// The thread identifier + /// A non-system message with which to append to the conversation. + /// The to monitor for cancellation requests. The default is . + /// + /// Only supports messages with role = User or agent: + /// https://platform.openai.com/docs/api-reference/runs/createRun#runs-createrun-additional_messages + /// + public Task AddChatMessageAsync(string threadId, ChatMessageContent message, CancellationToken cancellationToken = default) + { + return AgentThreadActions.CreateMessageAsync(this._client, threadId, message, cancellationToken); + } + + /// + /// Gets messages for a specified thread. + /// + /// The thread identifier + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of messages. + public IAsyncEnumerable GetThreadMessagesAsync(string threadId, CancellationToken cancellationToken = default) + { + return AgentThreadActions.GetMessagesAsync(this._client, threadId, cancellationToken); + } + + /// + /// Invoke the assistant on the specified thread. + /// + /// The thread identifier + /// Optional arguments to pass to the agents's invocation, including any . + /// The containing services, plugins, and other state for use by the agent. + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of response messages. 
+ /// + /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility. + /// + public IAsyncEnumerable InvokeAsync( + string threadId, + KernelArguments? arguments = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + { + return this.InvokeAsync(threadId, options: null, arguments, kernel, cancellationToken); + } + + /// + /// Invoke the assistant on the specified thread. + /// + /// The thread identifier + /// Optional invocation options + /// Optional arguments to pass to the agents's invocation, including any . + /// The containing services, plugins, and other state for use by the agent. + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of response messages. + /// + /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility. + /// + public async IAsyncEnumerable InvokeAsync( + string threadId, + AzureAIInvocationOptions? options, + KernelArguments? arguments = null, + Kernel? kernel = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + kernel ??= this.Kernel; + arguments = this.MergeArguments(arguments); + + await foreach ((bool isVisible, ChatMessageContent message) in AgentThreadActions.InvokeAsync(this, this._client, threadId, options, this.Logger, kernel, arguments, cancellationToken).ConfigureAwait(false)) + { + if (isVisible) + { + yield return message; + } + } + } + + /// + /// Invoke the assistant on the specified thread with streaming response. + /// + /// The thread identifier + /// Optional arguments to pass to the agents's invocation, including any . + /// The containing services, plugins, and other state for use by the agent. + /// Optional receiver of the completed messages generated + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of messages. 
+ /// + /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility. + /// + public IAsyncEnumerable InvokeStreamingAsync( + string threadId, + KernelArguments? arguments = null, + Kernel? kernel = null, + ChatHistory? messages = null, + CancellationToken cancellationToken = default) + { + return this.InvokeStreamingAsync(threadId, options: null, arguments, kernel, messages, cancellationToken); + } + + /// + /// Invoke the assistant on the specified thread with streaming response. + /// + /// The thread identifier + /// Optional invocation options + /// Optional arguments to pass to the agents's invocation, including any . + /// The containing services, plugins, and other state for use by the agent. + /// Optional receiver of the completed messages generated + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of messages. + /// + /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility. + /// + public IAsyncEnumerable InvokeStreamingAsync( + string threadId, + AzureAIInvocationOptions? options, + KernelArguments? arguments = null, + Kernel? kernel = null, + ChatHistory? messages = null, + CancellationToken cancellationToken = default) + { + kernel ??= this.Kernel; + arguments = this.MergeArguments(arguments); + + return AgentThreadActions.InvokeStreamingAsync(this, this._client, threadId, messages, options, this.Logger, kernel, arguments, cancellationToken); + } + + /// + protected override IEnumerable GetChannelKeys() + { + // Distinguish from other channel types. 
+ yield return typeof(AzureAIChannel).FullName!; + + foreach (string key in this._channelKeys) + { + yield return key; + } + } + + /// + protected override async Task CreateChannelAsync(CancellationToken cancellationToken) + { + this.Logger.LogAzureAIAgentCreatingChannel(nameof(CreateChannelAsync), nameof(AzureAIChannel)); + + string threadId = await AgentThreadActions.CreateThreadAsync(this._client, cancellationToken).ConfigureAwait(false); + + this.Logger.LogInformation("[{MethodName}] Created assistant thread: {ThreadId}", nameof(CreateChannelAsync), threadId); + + AzureAIChannel channel = + new(this._client, threadId) + { + Logger = this.LoggerFactory.CreateLogger() + }; + + this.Logger.LogAzureAIAgentCreatedChannel(nameof(CreateChannelAsync), nameof(AzureAIChannel), threadId); + + return channel; + } + + internal Task GetInstructionsAsync(Kernel kernel, KernelArguments? arguments, CancellationToken cancellationToken) + { + return this.FormatInstructionsAsync(kernel, arguments, cancellationToken); + } + + /// + protected override async Task RestoreChannelAsync(string channelState, CancellationToken cancellationToken) + { + string threadId = channelState; + + this.Logger.LogAzureAIAgentRestoringChannel(nameof(RestoreChannelAsync), nameof(AzureAIChannel), threadId); + + AgentThread thread = await this._client.GetThreadAsync(threadId, cancellationToken).ConfigureAwait(false); + + this.Logger.LogAzureAIAgentRestoredChannel(nameof(RestoreChannelAsync), nameof(AzureAIChannel), threadId); + + return new AzureAIChannel(this._client, thread.Id); + } + + /// + /// Initializes a new instance of the class. + /// + /// The agent model definition. + /// A instance. + /// An optional template factory + public AzureAIAgent( + Azure.AI.Projects.Agent model, + AzureAIClientProvider clientProvider, + IPromptTemplateFactory? templateFactory = null) + { + this._provider = clientProvider; + this._client = clientProvider.Client.GetAgentsClient(); + this._channelKeys = [.. 
clientProvider.ConfigurationKeys]; + + this.Definition = model; + this.Description = this.Definition.Description; + this.Id = this.Definition.Id; + this.Name = this.Definition.Name; + this.Instructions = this.Definition.Instructions; + + if (templateFactory != null) + { + PromptTemplateConfig templateConfig = new(this.Instructions); + this.Template = templateFactory.Create(templateConfig); + } + } +} diff --git a/dotnet/src/Agents/AzureAI/AzureAIChannel.cs b/dotnet/src/Agents/AzureAI/AzureAIChannel.cs new file mode 100644 index 000000000000..8769ac5426c5 --- /dev/null +++ b/dotnet/src/Agents/AzureAI/AzureAIChannel.cs @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Azure.AI.Projects; +using Microsoft.SemanticKernel.Agents.AzureAI.Internal; + +namespace Microsoft.SemanticKernel.Agents.AzureAI; + +/// +/// A specialization for use with . +/// +internal sealed class AzureAIChannel(AgentsClient client, string threadId) + : AgentChannel +{ + /// + protected override async Task ReceiveAsync(IEnumerable history, CancellationToken cancellationToken) + { + foreach (ChatMessageContent message in history) + { + await AgentThreadActions.CreateMessageAsync(client, threadId, message, cancellationToken).ConfigureAwait(false); + } + } + + /// + protected override IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync( + AzureAIAgent agent, + CancellationToken cancellationToken) + { + return AgentThreadActions.InvokeAsync(agent, client, threadId, invocationOptions: null, this.Logger, agent.Kernel, agent.Arguments, cancellationToken); + } + + /// + protected override IAsyncEnumerable InvokeStreamingAsync(AzureAIAgent agent, IList messages, CancellationToken cancellationToken = default) + { + return AgentThreadActions.InvokeStreamingAsync(agent, client, threadId, messages, invocationOptions: null, this.Logger, agent.Kernel, agent.Arguments, 
cancellationToken); + } + + /// + protected override IAsyncEnumerable GetHistoryAsync(CancellationToken cancellationToken) + { + return AgentThreadActions.GetMessagesAsync(client, threadId, cancellationToken); + } + + /// + protected override Task ResetAsync(CancellationToken cancellationToken = default) + { + return client.DeleteThreadAsync(threadId, cancellationToken); + } + + /// + protected override string Serialize() { return threadId; } +} diff --git a/dotnet/src/Agents/AzureAI/AzureAIClientProvider.cs b/dotnet/src/Agents/AzureAI/AzureAIClientProvider.cs new file mode 100644 index 000000000000..b5e2eec9c32e --- /dev/null +++ b/dotnet/src/Agents/AzureAI/AzureAIClientProvider.cs @@ -0,0 +1,109 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using System.Linq; +using System.Net.Http; +using Azure.AI.Projects; +using Azure.Core; +using Azure.Core.Pipeline; +using Microsoft.SemanticKernel.Http; + +namespace Microsoft.SemanticKernel.Agents.AzureAI; + +/// +/// Provides an for use by . +/// +public sealed class AzureAIClientProvider +{ + /// + /// An active client instance. + /// + public AIProjectClient Client { get; } + + /// + /// Configuration keys required for management. + /// + internal IReadOnlyList ConfigurationKeys { get; } + + private AzureAIClientProvider(AIProjectClient client, IEnumerable keys) + { + this.Client = client; + this.ConfigurationKeys = keys.ToArray(); + } + + /// + /// Produce a . + /// + /// The Azure AI Foundry project connection string, in the form `endpoint;subscription_id;resource_group_name;project_name`. + /// A credential used to authenticate to an Azure Service. + /// Custom for HTTP requests. + public static AzureAIClientProvider FromConnectionString( + string connectionString, + TokenCredential credential, + HttpClient? 
httpClient = null) + { + Verify.NotNullOrWhiteSpace(connectionString, nameof(connectionString)); + Verify.NotNull(credential, nameof(credential)); + + AIProjectClientOptions clientOptions = CreateAzureClientOptions(httpClient); + + return new(new AIProjectClient(connectionString, credential, clientOptions), CreateConfigurationKeys(connectionString, httpClient)); + } + + /// + /// Directly provide a client instance. + /// + public static AzureAIClientProvider FromClient(AIProjectClient client) + { + return new(client, [client.GetType().FullName!, client.GetHashCode().ToString()]); + } + + private static AIProjectClientOptions CreateAzureClientOptions(HttpClient? httpClient) + { + AIProjectClientOptions options = + new() + { + Diagnostics = { + ApplicationId = HttpHeaderConstant.Values.UserAgent, + } + }; + + options.AddPolicy(new SemanticKernelHeadersPolicy(), HttpPipelinePosition.PerCall); + + if (httpClient is not null) + { + options.Transport = new HttpClientTransport(httpClient); + // Disable retry policy if and only if a custom HttpClient is provided. + options.RetryPolicy = new RetryPolicy(maxRetries: 0); + } + + return options; + } + + private static IEnumerable CreateConfigurationKeys(string connectionString, HttpClient? 
httpClient) + { + yield return connectionString; + + if (httpClient is not null) + { + if (httpClient.BaseAddress is not null) + { + yield return httpClient.BaseAddress.AbsoluteUri; + } + + foreach (string header in httpClient.DefaultRequestHeaders.SelectMany(h => h.Value)) + { + yield return header; + } + } + } + + private class SemanticKernelHeadersPolicy : HttpPipelineSynchronousPolicy + { + public override void OnSendingRequest(HttpMessage message) + { + message.Request.Headers.Add( + HttpHeaderConstant.Names.SemanticKernelVersion, + HttpHeaderConstant.Values.GetAssemblyVersion(typeof(AzureAIAgent))); + } + } +} diff --git a/dotnet/src/Agents/AzureAI/AzureAIInvocationOptions.cs b/dotnet/src/Agents/AzureAI/AzureAIInvocationOptions.cs new file mode 100644 index 000000000000..8772baf32f7d --- /dev/null +++ b/dotnet/src/Agents/AzureAI/AzureAIInvocationOptions.cs @@ -0,0 +1,104 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Agents.AzureAI; + +/// +/// Defines per invocation execution options that override the assistant definition. +/// +/// +/// Not applicable to usage. +/// +public sealed class AzureAIInvocationOptions +{ + /// + /// Override the AI model targeted by the agent. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ModelName { get; init; } + + /// + /// Appends additional instructions. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? AdditionalInstructions { get; init; } + + /// + /// Additional messages to add to the thread. + /// + /// + /// Only supports messages with role = User or Assistant: + /// https://platform.openai.com/docs/api-reference/runs/createRun#runs-createrun-additional_messages + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyList? 
AdditionalMessages { get; init; } + + /// + /// Set if code_interpreter tool is enabled. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public bool EnableCodeInterpreter { get; init; } + + /// + /// Set if file_search tool is enabled. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public bool EnableFileSearch { get; init; } + + /// + /// Set if json response-format is enabled. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public bool? EnableJsonResponse { get; init; } + + /// + /// The maximum number of completion tokens that may be used over the course of the run. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? MaxCompletionTokens { get; init; } + + /// + /// The maximum number of prompt tokens that may be used over the course of the run. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? MaxPromptTokens { get; init; } + + /// + /// Enables parallel function calling during tool use. Enabled by default. + /// Use this property to disable. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public bool? ParallelToolCallsEnabled { get; init; } + + /// + /// When set, the thread will be truncated to the N most recent messages in the thread. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? TruncationMessageCount { get; init; } + + /// + /// The sampling temperature to use, between 0 and 2. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public float? Temperature { get; init; } + + /// + /// An alternative to sampling with temperature, called nucleus sampling, where the model + /// considers the results of the tokens with top_p probability mass. + /// So 0.1 means only the tokens comprising the top 10% probability mass are considered. + /// + /// + /// Recommended to set this or temperature but not both. 
+ /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public float? TopP { get; init; } + + /// + /// A set of up to 16 key/value pairs that can be attached to an agent, used for + /// storing additional information about that object in a structured format.Keys + /// may be up to 64 characters in length and values may be up to 512 characters in length. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyDictionary? Metadata { get; init; } +} diff --git a/dotnet/src/Agents/AzureAI/AzureAIThreadMessageFactory.cs b/dotnet/src/Agents/AzureAI/AzureAIThreadMessageFactory.cs new file mode 100644 index 000000000000..5f9cc8f72c4b --- /dev/null +++ b/dotnet/src/Agents/AzureAI/AzureAIThreadMessageFactory.cs @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using Azure.AI.Projects; +using Microsoft.SemanticKernel.Agents.AzureAI.Internal; + +namespace Microsoft.SemanticKernel.Agents.AzureAI; + +/// +/// Exposes patterns for creating and managing agent threads. +/// +/// +/// Support translation of from native models. +/// +public static class AzureAIThreadMessageFactory +{ + /// + /// Translates to for thread creation. + /// + public static IEnumerable Translate(IEnumerable messages) + { + return AgentMessageFactory.GetThreadMessages(messages); + } +} diff --git a/dotnet/src/Agents/AzureAI/Extensions/AgentExtensions.cs b/dotnet/src/Agents/AzureAI/Extensions/AgentExtensions.cs new file mode 100644 index 000000000000..112f85dc162f --- /dev/null +++ b/dotnet/src/Agents/AzureAI/Extensions/AgentExtensions.cs @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft. All rights reserved. +namespace Microsoft.SemanticKernel.Agents.AzureAI; + +/// +/// Extension methods for . +/// +internal static class AgentExtensions +{ + /// + /// Provides a name for the agent, even if it's the identifier. 
+ /// (since allows null) + /// + /// The target agent + /// The agent name as a non-empty string + public static string GetName(this Agent agent) + { + return agent.Name ?? agent.Id; + } +} diff --git a/dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs b/dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs new file mode 100644 index 000000000000..7d4cf718b1e0 --- /dev/null +++ b/dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs @@ -0,0 +1,120 @@ +// Copyright (c) Microsoft. All rights reserved. +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Azure.AI.Projects; +using Microsoft.SemanticKernel.Agents.AzureAI.Internal; + +namespace Microsoft.SemanticKernel.Agents.AzureAI.Extensions; + +/// +/// Extensions associated with an Agent run processing. +/// +/// +/// Improves testability. +/// +internal static class AgentRunExtensions +{ + public static async IAsyncEnumerable GetStepsAsync( + this AgentsClient client, + ThreadRun run, + [EnumeratorCancellation] CancellationToken cancellationToken) + { + PageableList? steps = null; + do + { + steps = await client.GetRunStepsAsync(run, after: steps?.LastId, cancellationToken: cancellationToken).ConfigureAwait(false); + foreach (RunStep step in steps) + { + yield return step; + } + } + while (steps?.HasMore ?? false); + } + + public static async Task CreateAsync( + this AgentsClient client, + string threadId, + AzureAIAgent agent, + string? instructions, + ToolDefinition[] tools, + AzureAIInvocationOptions? invocationOptions, + CancellationToken cancellationToken) + { + TruncationObject? truncationStrategy = GetTruncationStrategy(invocationOptions); + BinaryData? 
responseFormat = GetResponseFormat(invocationOptions); + return + await client.CreateRunAsync( + threadId, + agent.Definition.Id, + overrideModelName: invocationOptions?.ModelName, + instructions, + additionalInstructions: invocationOptions?.AdditionalInstructions, + additionalMessages: AgentMessageFactory.GetThreadMessages(invocationOptions?.AdditionalMessages).ToArray(), + overrideTools: tools, + stream: false, + temperature: invocationOptions?.Temperature, + topP: invocationOptions?.TopP, + maxPromptTokens: invocationOptions?.MaxPromptTokens, + maxCompletionTokens: invocationOptions?.MaxCompletionTokens, + truncationStrategy, + toolChoice: null, + responseFormat, + parallelToolCalls: invocationOptions?.ParallelToolCallsEnabled, + metadata: invocationOptions?.Metadata, + include: null, + cancellationToken).ConfigureAwait(false); + } + + private static BinaryData? GetResponseFormat(AzureAIInvocationOptions? invocationOptions) + { + return invocationOptions?.EnableJsonResponse == true ? + BinaryData.FromString(ResponseFormat.JsonObject.ToString()) : + null; + } + + private static TruncationObject? GetTruncationStrategy(AzureAIInvocationOptions? invocationOptions) + { + return invocationOptions?.TruncationMessageCount == null ? + null : + new(TruncationStrategy.LastMessages) + { + LastMessages = invocationOptions.TruncationMessageCount + }; + } + + public static IAsyncEnumerable CreateStreamingAsync( + this AgentsClient client, + string threadId, + AzureAIAgent agent, + string? instructions, + ToolDefinition[] tools, + AzureAIInvocationOptions? invocationOptions, + CancellationToken cancellationToken) + { + TruncationObject? truncationStrategy = GetTruncationStrategy(invocationOptions); + BinaryData? 
responseFormat = GetResponseFormat(invocationOptions); + return + client.CreateRunStreamingAsync( + threadId, + agent.Definition.Id, + overrideModelName: invocationOptions?.ModelName, + instructions, + additionalInstructions: invocationOptions?.AdditionalInstructions, + additionalMessages: AgentMessageFactory.GetThreadMessages(invocationOptions?.AdditionalMessages).ToArray(), + overrideTools: tools, + temperature: invocationOptions?.Temperature, + topP: invocationOptions?.TopP, + maxPromptTokens: invocationOptions?.MaxPromptTokens, + maxCompletionTokens: invocationOptions?.MaxCompletionTokens, + truncationStrategy, + toolChoice: null, + responseFormat, + parallelToolCalls: invocationOptions?.ParallelToolCallsEnabled, + metadata: invocationOptions?.Metadata, + cancellationToken); + } +} diff --git a/dotnet/src/Agents/AzureAI/Extensions/KernelFunctionExtensions.cs b/dotnet/src/Agents/AzureAI/Extensions/KernelFunctionExtensions.cs new file mode 100644 index 000000000000..231a906de6a4 --- /dev/null +++ b/dotnet/src/Agents/AzureAI/Extensions/KernelFunctionExtensions.cs @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft. All rights reserved. +using System; +using System.Collections.Generic; +using System.Linq; +using Azure.AI.Projects; + +namespace Microsoft.SemanticKernel.Agents.AzureAI; + +internal static class KernelFunctionExtensions +{ + /// + /// Convert to an OpenAI tool model. 
+ /// + /// The source function + /// The plugin name + /// An OpenAI tool definition + public static FunctionToolDefinition ToToolDefinition(this KernelFunction function, string pluginName) + { + var metadata = function.Metadata; + if (metadata.Parameters.Count > 0) + { + var required = new List(metadata.Parameters.Count); + var parameters = + metadata.Parameters.ToDictionary( + p => p.Name, + p => + { + if (p.IsRequired) + { + required.Add(p.Name); + } + + return + new + { + type = ConvertType(p.ParameterType), + description = p.Description, + }; + }); + + var spec = + new + { + type = "object", + properties = parameters, + required, + }; + + return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName), function.Description, BinaryData.FromObjectAsJson(spec)); + } + + return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName), function.Description); + } + + private static string ConvertType(Type? type) + { + if (type is null || type == typeof(string)) + { + return "string"; + } + + if (type == typeof(bool)) + { + return "boolean"; + } + + if (type.IsEnum) + { + return "enum"; + } + + if (type.IsArray) + { + return "array"; + } + + if (type == typeof(DateTime) || type == typeof(DateTimeOffset)) + { + return "date-time"; + } + + return Type.GetTypeCode(type) switch + { + TypeCode.SByte or TypeCode.Byte or + TypeCode.Int16 or TypeCode.UInt16 or + TypeCode.Int32 or TypeCode.UInt32 or + TypeCode.Int64 or TypeCode.UInt64 or + TypeCode.Single or TypeCode.Double or TypeCode.Decimal => "number", + + _ => "object", + }; + } +} diff --git a/dotnet/src/Agents/AzureAI/Internal/AgentMessageFactory.cs b/dotnet/src/Agents/AzureAI/Internal/AgentMessageFactory.cs new file mode 100644 index 000000000000..621e364acf6a --- /dev/null +++ b/dotnet/src/Agents/AzureAI/Internal/AgentMessageFactory.cs @@ -0,0 +1,98 @@ +// Copyright (c) Microsoft. All rights reserved. 
+using System.Collections.Generic; +using System.Linq; +using Azure.AI.Projects; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace Microsoft.SemanticKernel.Agents.AzureAI.Internal; + +/// +/// Factory for creating based on . +/// +/// +/// Improves testability. +/// +internal static class AgentMessageFactory +{ + /// + /// Translate metadata from a to be used for a or + /// . + /// + /// The message content. + public static Dictionary GetMetadata(ChatMessageContent message) + { + return message.Metadata?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value?.ToString() ?? string.Empty) ?? []; + } + + /// + /// Translate attachments from a to be used for a or + /// + /// The message content. + public static IEnumerable GetAttachments(ChatMessageContent message) + { + return + message.Items + .OfType() + .Select( + fileContent => + new MessageAttachment(fileContent.FileId, GetToolDefinition(fileContent.Tools).ToList())); + } + + /// + /// Translates a set of to a set of ."/> + /// + /// A list of objects/ + public static IEnumerable GetThreadMessages(IEnumerable? messages) + { + if (messages is not null) + { + foreach (ChatMessageContent message in messages) + { + string? content = message.Content; + if (string.IsNullOrWhiteSpace(content)) + { + continue; + } + + ThreadMessageOptions threadMessage = new( + role: message.Role == AuthorRole.User ? MessageRole.User : MessageRole.Agent, + content: message.Content) + { + Attachments = GetAttachments(message).ToArray(), + }; + + if (message.Metadata != null) + { + foreach (string key in message.Metadata.Keys) + { + threadMessage.Metadata = GetMetadata(message); + } + } + + yield return threadMessage; + } + } + } + + private static readonly Dictionary s_toolMetadata = new() + { + { AzureAIAgent.Tools.CodeInterpreter, new CodeInterpreterToolDefinition() }, + { AzureAIAgent.Tools.FileSearch, new FileSearchToolDefinition() }, + }; + + private static IEnumerable GetToolDefinition(IEnumerable? 
tools) + { + if (tools is null) + { + yield break; + } + + foreach (string tool in tools) + { + if (s_toolMetadata.TryGetValue(tool, out ToolDefinition? toolDefinition)) + { + yield return toolDefinition; + } + } + } +} diff --git a/dotnet/src/Agents/AzureAI/Internal/AgentThreadActions.cs b/dotnet/src/Agents/AzureAI/Internal/AgentThreadActions.cs new file mode 100644 index 000000000000..1f78f8ced2a7 --- /dev/null +++ b/dotnet/src/Agents/AzureAI/Internal/AgentThreadActions.cs @@ -0,0 +1,853 @@ +// Copyright (c) Microsoft. All rights reserved. +using System; +using System.ClientModel; +using System.Collections.Generic; +using System.Linq; +using System.Net; +using System.Runtime.CompilerServices; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Azure; +using Azure.AI.Projects; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Agents.AzureAI.Extensions; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.FunctionCalling; + +namespace Microsoft.SemanticKernel.Agents.AzureAI.Internal; + +/// +/// Actions associated with an Open Assistant thread. +/// +internal static class AgentThreadActions +{ + private static readonly HashSet s_pollingStatuses = + [ + RunStatus.Queued, + RunStatus.InProgress, + RunStatus.Cancelling, + ]; + + private static readonly HashSet s_failureStatuses = + [ + RunStatus.Expired, + RunStatus.Failed, + RunStatus.Cancelled, + ]; + + /// + /// Create a new assistant thread. + /// + /// The assistant client + /// The to monitor for cancellation requests. The default is . + /// The thread identifier + public static async Task CreateThreadAsync(AgentsClient client, CancellationToken cancellationToken = default) + { + AgentThread thread = await client.CreateThreadAsync(cancellationToken: cancellationToken).ConfigureAwait(false); + + return thread.Id; + } + + /// + /// Create a message in the specified thread. 
+ /// + /// The assistant client + /// The thread identifier + /// The message to add + /// The to monitor for cancellation requests. The default is . + /// if a system message is present, without taking any other action + public static async Task CreateMessageAsync(AgentsClient client, string threadId, ChatMessageContent message, CancellationToken cancellationToken) + { + if (message.Items.Any(i => i is FunctionCallContent)) + { + return; + } + + string? content = message.Content; + if (string.IsNullOrWhiteSpace(content)) + { + return; + } + + await client.CreateMessageAsync( + threadId, + role: message.Role == AuthorRole.User ? MessageRole.User : MessageRole.Agent, + content, + attachments: AgentMessageFactory.GetAttachments(message).ToArray(), + metadata: AgentMessageFactory.GetMetadata(message), + cancellationToken).ConfigureAwait(false); + } + + /// + /// Retrieves the thread messages. + /// + /// The assistant client + /// The thread identifier + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of messages. + public static async IAsyncEnumerable GetMessagesAsync(AgentsClient client, string threadId, [EnumeratorCancellation] CancellationToken cancellationToken) + { + Dictionary agentNames = []; // Cache agent names by their identifier + + string? lastId = null; + PageableList? messages = null; + do + { + messages = await client.GetMessagesAsync(threadId, runId: null, limit: null, ListSortOrder.Descending, after: lastId, before: null, cancellationToken).ConfigureAwait(false); + foreach (ThreadMessage message in messages) + { + Console.WriteLine(message.Id); + lastId = message.Id; + string? 
assistantName = null; + if (!string.IsNullOrWhiteSpace(message.AssistantId) && + !agentNames.TryGetValue(message.AssistantId, out assistantName)) + { + Azure.AI.Projects.Agent assistant = await client.GetAgentAsync(message.AssistantId, cancellationToken).ConfigureAwait(false); + if (!string.IsNullOrWhiteSpace(assistant.Name)) + { + agentNames.Add(assistant.Id, assistant.Name); + } + } + + assistantName ??= message.AssistantId; + + ChatMessageContent content = GenerateMessageContent(assistantName, message); + + if (content.Items.Count > 0) + { + yield return content; + } + } + } while (messages?.HasMore ?? false); + } + + /// + /// Invoke the assistant on the specified thread. + /// In the enumeration returned by this method, a message is considered visible if it is intended to be displayed to the user. + /// Example of a non-visible message is function-content for functions that are automatically executed. + /// + /// The assistant agent to interact with the thread. + /// The assistant client + /// The thread identifier + /// Options to utilize for the invocation + /// The logger to utilize (might be agent or channel scoped) + /// The plugins and other state. + /// Optional arguments to pass to the agents's invocation, including any . + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of messages. + public static async IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync( + AzureAIAgent agent, + AgentsClient client, + string threadId, + AzureAIInvocationOptions? invocationOptions, + ILogger logger, + Kernel kernel, + KernelArguments? arguments, + [EnumeratorCancellation] CancellationToken cancellationToken) + { + logger.LogAzureAIAgentCreatingRun(nameof(InvokeAsync), threadId); + + ToolDefinition[]? tools = [.. agent.Definition.Tools, .. kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name)))]; + + string? 
instructions = await agent.GetInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false); + + ThreadRun run = await client.CreateAsync(threadId, agent, instructions, tools, invocationOptions, cancellationToken).ConfigureAwait(false); + + logger.LogAzureAIAgentCreatedRun(nameof(InvokeAsync), run.Id, threadId); + + FunctionCallsProcessor functionProcessor = new(logger); + // This matches current behavior. Will be configurable upon integrating with `FunctionChoice` (#6795/#5200) + FunctionChoiceBehaviorOptions functionOptions = new() { AllowConcurrentInvocation = true, AllowParallelCalls = true }; + + // Evaluate status and process steps and messages, as encountered. + HashSet processedStepIds = []; + Dictionary functionSteps = []; + do + { + // Check for cancellation + cancellationToken.ThrowIfCancellationRequested(); + + // Poll run and steps until actionable + await PollRunStatusAsync().ConfigureAwait(false); + + // Is in terminal state? + if (s_failureStatuses.Contains(run.Status)) + { + throw new KernelException($"Agent Failure - Run terminated: {run.Status} [{run.Id}]: {run.LastError?.Message ?? "Unknown"}"); + } + + RunStep[] steps = await client.GetStepsAsync(run, cancellationToken).ToArrayAsync(cancellationToken).ConfigureAwait(false); + + // Is tool action required? + if (run.Status == RunStatus.RequiresAction) + { + logger.LogAzureAIAgentProcessingRunSteps(nameof(InvokeAsync), run.Id, threadId); + + // Execute functions in parallel and post results at once. 
+ FunctionCallContent[] functionCalls = steps.SelectMany(step => ParseFunctionStep(agent, step)).ToArray(); + if (functionCalls.Length > 0) + { + // Emit function-call content + ChatMessageContent functionCallMessage = GenerateFunctionCallContent(agent.GetName(), functionCalls); + yield return (IsVisible: false, Message: functionCallMessage); + + // Invoke functions for each tool-step + FunctionResultContent[] functionResults = + await functionProcessor.InvokeFunctionCallsAsync( + functionCallMessage, + (_) => true, + functionOptions, + kernel, + isStreaming: false, + cancellationToken).ToArrayAsync(cancellationToken).ConfigureAwait(false); + + // Capture function-call for message processing + foreach (FunctionResultContent functionCall in functionResults) + { + functionSteps.Add(functionCall.CallId!, functionCall); + } + + // Process tool output + ToolOutput[] toolOutputs = GenerateToolOutputs(functionResults); + + await client.SubmitToolOutputsToRunAsync(run, toolOutputs, cancellationToken).ConfigureAwait(false); + } + + logger.LogAzureAIAgentProcessedRunSteps(nameof(InvokeAsync), functionCalls.Length, run.Id, threadId); + } + + // Enumerate completed messages + logger.LogAzureAIAgentProcessingRunMessages(nameof(InvokeAsync), run.Id, threadId); + + IEnumerable completedStepsToProcess = + steps + .Where(s => s.CompletedAt.HasValue && !processedStepIds.Contains(s.Id)) + .OrderBy(s => s.CreatedAt); + + int messageCount = 0; + foreach (RunStep completedStep in completedStepsToProcess) + { + if (completedStep.Type == RunStepType.ToolCalls) + { + RunStepToolCallDetails toolDetails = (RunStepToolCallDetails)completedStep.StepDetails; + foreach (RunStepToolCall toolCall in toolDetails.ToolCalls) + { + bool isVisible = false; + ChatMessageContent? 
content = null; + + // Process code-interpreter content + if (toolCall is RunStepCodeInterpreterToolCall codeTool) + { + content = GenerateCodeInterpreterContent(agent.GetName(), codeTool.Input, completedStep); + isVisible = true; + } + // Process function result content + else if (toolCall is RunStepFunctionToolCall functionTool) + { + FunctionResultContent functionStep = functionSteps[functionTool.Id]; // Function step always captured on invocation + content = GenerateFunctionResultContent(agent.GetName(), [functionStep], completedStep); + } + + if (content is not null) + { + ++messageCount; + + yield return (isVisible, Message: content); + } + } + } + else if (completedStep.Type == RunStepType.MessageCreation) + { + // Retrieve the message + RunStepMessageCreationDetails messageDetails = (RunStepMessageCreationDetails)completedStep.StepDetails; + ThreadMessage? message = await RetrieveMessageAsync(client, threadId, messageDetails.MessageCreation.MessageId, agent.PollingOptions.MessageSynchronizationDelay, cancellationToken).ConfigureAwait(false); + + if (message is not null) + { + ChatMessageContent content = GenerateMessageContent(agent.GetName(), message, completedStep); + + if (content.Items.Count > 0) + { + ++messageCount; + + yield return (IsVisible: true, Message: content); + } + } + } + + processedStepIds.Add(completedStep.Id); + } + + logger.LogAzureAIAgentProcessedRunMessages(nameof(InvokeAsync), messageCount, run.Id, threadId); + } + while (RunStatus.Completed != run.Status); + + logger.LogAzureAIAgentCompletedRun(nameof(InvokeAsync), run.Id, threadId); + + // Local function to assist in run polling (participates in method closure). 
+ async Task PollRunStatusAsync() + { + logger.LogAzureAIAgentPollingRunStatus(nameof(PollRunStatusAsync), run.Id, threadId); + + int count = 0; + + do + { + cancellationToken.ThrowIfCancellationRequested(); + + if (count > 0) + { + // Reduce polling frequency after a couple attempts + await Task.Delay(agent.PollingOptions.GetPollingInterval(count), cancellationToken).ConfigureAwait(false); + } + + ++count; + + try + { + run = await client.GetRunAsync(threadId, run.Id, cancellationToken).ConfigureAwait(false); + } + // The presence of a `Status` code means the server responded with error...always fail in that case + catch (ClientResultException clientException) when (clientException.Status <= 0) + { + // Check maximum retry count + if (count >= agent.PollingOptions.MaximumRetryCount) + { + throw; + } + + // Retry for potential transient failure + continue; + } + catch (AggregateException aggregateException) when (aggregateException.InnerException is ClientResultException innerClientException) + { + // The presence of a `Status` code means the server responded with error + if (innerClientException.Status > 0) + { + throw; + } + + // Check maximum retry count + if (count >= agent.PollingOptions.MaximumRetryCount) + { + throw; + } + + // Retry for potential transient failure + continue; + } + } + while (s_pollingStatuses.Contains(run.Status)); + + logger.LogAzureAIAgentPolledRunStatus(nameof(PollRunStatusAsync), run.Status, run.Id, threadId); + } + } + + /// + /// Invoke the assistant on the specified thread using streaming. + /// + /// The assistant agent to interact with the thread. + /// The assistant client + /// The thread identifier + /// The receiver for the completed messages generated + /// Options to utilize for the invocation + /// The logger to utilize (might be agent or channel scoped) + /// The plugins and other state. + /// Optional arguments to pass to the agents's invocation, including any . + /// The to monitor for cancellation requests. 
The default is . + /// Asynchronous enumeration of messages. + /// + /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility. + /// + public static async IAsyncEnumerable InvokeStreamingAsync( + AzureAIAgent agent, + AgentsClient client, + string threadId, + IList? messages, + AzureAIInvocationOptions? invocationOptions, + ILogger logger, + Kernel kernel, + KernelArguments? arguments, + [EnumeratorCancellation] CancellationToken cancellationToken) + { + logger.LogAzureAIAgentCreatingRun(nameof(InvokeAsync), threadId); + + ToolDefinition[]? tools = [.. agent.Definition.Tools, .. kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name)))]; + + string? instructions = await agent.GetInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false); + + // Evaluate status and process steps and messages, as encountered. + HashSet processedStepIds = []; + Dictionary stepFunctionResults = []; + List stepsToProcess = []; + + FunctionCallsProcessor functionProcessor = new(logger); + // This matches current behavior. Will be configurable upon integrating with `FunctionChoice` (#6795/#5200) + FunctionChoiceBehaviorOptions functionOptions = new() { AllowConcurrentInvocation = true, AllowParallelCalls = true }; + + ThreadRun? 
run = null; + IAsyncEnumerable asyncUpdates = client.CreateStreamingAsync(threadId, agent, instructions, tools, invocationOptions, cancellationToken); + do + { + // Check for cancellation + cancellationToken.ThrowIfCancellationRequested(); + + stepsToProcess.Clear(); + + await foreach (StreamingUpdate update in asyncUpdates.ConfigureAwait(false)) + { + if (update is RunUpdate runUpdate) + { + run = runUpdate.Value; + } + else if (update is MessageContentUpdate contentUpdate) + { + switch (contentUpdate.UpdateKind) + { + case StreamingUpdateReason.MessageUpdated: + yield return GenerateStreamingMessageContent(agent.GetName(), contentUpdate); + break; + } + } + else if (update is RunStepDetailsUpdate detailsUpdate) + { + StreamingChatMessageContent? toolContent = GenerateStreamingCodeInterpreterContent(agent.GetName(), detailsUpdate); + if (toolContent != null) + { + yield return toolContent; + } + else if (detailsUpdate.FunctionOutput != null) + { + yield return + new StreamingChatMessageContent(AuthorRole.Assistant, null) + { + AuthorName = agent.Name, + Items = [new StreamingFunctionCallUpdateContent(detailsUpdate.ToolCallId, detailsUpdate.FunctionName, detailsUpdate.FunctionArguments)] + }; + } + } + else if (update is RunStepUpdate stepUpdate) + { + switch (stepUpdate.UpdateKind) + { + case StreamingUpdateReason.RunStepCompleted: + stepsToProcess.Add(stepUpdate.Value); + break; + default: + break; + } + } + } + + if (run == null) + { + throw new KernelException($"Agent Failure - Run not created for thread: ${threadId}"); + } + + // Is in terminal state? + if (s_failureStatuses.Contains(run.Status)) + { + throw new KernelException($"Agent Failure - Run terminated: {run.Status} [{run.Id}]: {run.LastError?.Message ?? 
"Unknown"}"); + } + + if (run.Status == RunStatus.RequiresAction) + { + RunStep[] activeSteps = + await client.GetStepsAsync(run, cancellationToken) + .Where(step => step.Status == RunStepStatus.InProgress) + .ToArrayAsync(cancellationToken).ConfigureAwait(false); + + // Capture map between the tool call and its associated step + Dictionary toolMap = []; + foreach (RunStep step in activeSteps) + { + RunStepToolCallDetails toolCallDetails = (RunStepToolCallDetails)step.StepDetails; + foreach (RunStepToolCall stepDetails in toolCallDetails.ToolCalls) + { + toolMap[stepDetails.Id] = step.Id; + } + } + + // Execute functions in parallel and post results at once. + FunctionCallContent[] functionCalls = activeSteps.SelectMany(step => ParseFunctionStep(agent, step)).ToArray(); + if (functionCalls.Length > 0) + { + // Emit function-call content + ChatMessageContent functionCallMessage = GenerateFunctionCallContent(agent.GetName(), functionCalls); + messages?.Add(functionCallMessage); + + FunctionResultContent[] functionResults = + await functionProcessor.InvokeFunctionCallsAsync( + functionCallMessage, + (_) => true, + functionOptions, + kernel, + isStreaming: true, + cancellationToken).ToArrayAsync(cancellationToken).ConfigureAwait(false); + + // Process tool output + ToolOutput[] toolOutputs = GenerateToolOutputs(functionResults); + asyncUpdates = client.SubmitToolOutputsToStreamAsync(run, toolOutputs, cancellationToken); + + foreach (RunStep step in activeSteps) + { + stepFunctionResults.Add(step.Id, functionResults.Where(result => step.Id == toolMap[result.CallId!]).ToArray()); + } + } + } + + if (stepsToProcess.Count > 0) + { + logger.LogAzureAIAgentProcessingRunMessages(nameof(InvokeAsync), run!.Id, threadId); + + foreach (RunStep step in stepsToProcess) + { + if (step.StepDetails is RunStepMessageCreationDetails messageDetails) + { + ThreadMessage? 
message = + await RetrieveMessageAsync( + client, + threadId, + messageDetails.MessageCreation.MessageId, + agent.PollingOptions.MessageSynchronizationDelay, + cancellationToken).ConfigureAwait(false); + + if (message != null) + { + ChatMessageContent content = GenerateMessageContent(agent.GetName(), message, step); + messages?.Add(content); + } + } + else if (step.StepDetails is RunStepToolCallDetails toolDetails) + { + foreach (RunStepToolCall toolCall in toolDetails.ToolCalls) + { + if (toolCall is RunStepFunctionToolCall functionCall) + { + messages?.Add(GenerateFunctionResultContent(agent.GetName(), stepFunctionResults[step.Id], step)); + stepFunctionResults.Remove(step.Id); + break; + } + + if (toolCall is RunStepCodeInterpreterToolCall codeCall) + { + messages?.Add(GenerateCodeInterpreterContent(agent.GetName(), codeCall.Input, step)); + } + } + } + } + + logger.LogAzureAIAgentProcessedRunMessages(nameof(InvokeAsync), stepsToProcess.Count, run!.Id, threadId); + } + } + while (run?.Status != RunStatus.Completed); + + logger.LogAzureAIAgentCompletedRun(nameof(InvokeAsync), run?.Id ?? "Failed", threadId); + } + + private static ChatMessageContent GenerateMessageContent(string? assistantName, ThreadMessage message, RunStep? completedStep = null) + { + AuthorRole role = new(message.Role.ToString()); + + Dictionary? 
metadata = + new() + { + { nameof(ThreadMessage.CreatedAt), message.CreatedAt }, + { nameof(ThreadMessage.AssistantId), message.AssistantId }, + { nameof(ThreadMessage.ThreadId), message.ThreadId }, + { nameof(ThreadMessage.RunId), message.RunId }, + { nameof(MessageContentUpdate.MessageId), message.Id }, + }; + + if (completedStep != null) + { + metadata[nameof(RunStepDetailsUpdate.StepId)] = completedStep.Id; + metadata[nameof(RunStep.Usage)] = completedStep.Usage; + } + + ChatMessageContent content = + new(role, content: null) + { + AuthorName = assistantName, + Metadata = metadata, + }; + + foreach (MessageContent itemContent in message.ContentItems) + { + // Process text content + if (itemContent is MessageTextContent textContent) + { + content.Items.Add(new TextContent(textContent.Text)); + + foreach (MessageTextAnnotation annotation in textContent.Annotations) + { + content.Items.Add(GenerateAnnotationContent(annotation)); + } + } + // Process image content + else if (itemContent is MessageImageFileContent imageContent) + { + content.Items.Add(new FileReferenceContent(imageContent.FileId)); + } + } + + return content; + } + + private static StreamingChatMessageContent GenerateStreamingMessageContent(string? assistantName, MessageContentUpdate update) + { + StreamingChatMessageContent content = + new(AuthorRole.Assistant, content: null) + { + AuthorName = assistantName, + }; + + // Process text content + if (!string.IsNullOrEmpty(update.Text)) + { + content.Items.Add(new StreamingTextContent(update.Text)); + } + // Process image content + else if (update.ImageFileId != null) + { + content.Items.Add(new StreamingFileReferenceContent(update.ImageFileId)); + } + // Process annotations + else if (update.TextAnnotation != null) + { + content.Items.Add(GenerateStreamingAnnotationContent(update.TextAnnotation)); + } + + if (update.Role.HasValue && update.Role.Value != MessageRole.User) + { + content.Role = new(update.Role.Value.ToString() ?? 
MessageRole.Agent.ToString()); + } + + return content; + } + + private static StreamingChatMessageContent? GenerateStreamingCodeInterpreterContent(string? assistantName, RunStepDetailsUpdate update) + { + StreamingChatMessageContent content = + new(AuthorRole.Assistant, content: null) + { + AuthorName = assistantName, + }; + + // Process text content + if (update.CodeInterpreterInput != null) + { + content.Items.Add(new StreamingTextContent(update.CodeInterpreterInput)); + content.Metadata = new Dictionary { { AzureAIAgent.CodeInterpreterMetadataKey, true } }; + } + + if ((update.CodeInterpreterOutputs?.Count ?? 0) > 0) + { + foreach (RunStepDeltaCodeInterpreterOutput output in update.CodeInterpreterOutputs!) + { + if (output is RunStepDeltaCodeInterpreterImageOutput imageOutput) + { + content.Items.Add(new StreamingFileReferenceContent(imageOutput.Image.FileId)); + } + } + } + + return content.Items.Count > 0 ? content : null; + } + + private static AnnotationContent GenerateAnnotationContent(MessageTextAnnotation annotation) + { + string? fileId = null; + + if (annotation is MessageTextFileCitationAnnotation fileCitationAnnotation) + { + fileId = fileCitationAnnotation.FileId; + } + else if (annotation is MessageTextFilePathAnnotation filePathAnnotation) + { + fileId = filePathAnnotation.FileId; + } + + return + new(annotation.Text) + { + Quote = annotation.Text, + FileId = fileId, + }; + } + + private static StreamingAnnotationContent GenerateStreamingAnnotationContent(TextAnnotationUpdate annotation) + { + string? fileId = null; + + if (!string.IsNullOrEmpty(annotation.OutputFileId)) + { + fileId = annotation.OutputFileId; + } + else if (!string.IsNullOrEmpty(annotation.InputFileId)) + { + fileId = annotation.InputFileId; + } + + return + new(annotation.TextToReplace) + { + StartIndex = annotation.StartIndex ?? 0, + EndIndex = annotation.EndIndex ?? 
0, + FileId = fileId, + }; + } + + private static ChatMessageContent GenerateCodeInterpreterContent(string agentName, string pythonCode, RunStep completedStep) + { + Dictionary metadata = GenerateToolCallMetadata(completedStep); + metadata[AzureAIAgent.CodeInterpreterMetadataKey] = true; + + return + new ChatMessageContent( + AuthorRole.Assistant, + [ + new TextContent(pythonCode) + ]) + { + AuthorName = agentName, + Metadata = metadata, + }; + } + + private static IEnumerable ParseFunctionStep(AzureAIAgent agent, RunStep step) + { + if (step.Status == RunStepStatus.InProgress && step.Type == RunStepType.ToolCalls) + { + RunStepToolCallDetails toolCallDetails = (RunStepToolCallDetails)step.StepDetails; + foreach (RunStepToolCall toolCall in toolCallDetails.ToolCalls) + { + if (toolCall is RunStepFunctionToolCall functionCall) + { + (FunctionName nameParts, KernelArguments functionArguments) = ParseFunctionCall(functionCall.Name, functionCall.Arguments); + + FunctionCallContent content = new(nameParts.Name, nameParts.PluginName, toolCall.Id, functionArguments); + + yield return content; + } + } + } + } + + private static (FunctionName functionName, KernelArguments arguments) ParseFunctionCall(string functionName, string? functionArguments) + { + FunctionName nameParts = FunctionName.Parse(functionName); + + KernelArguments arguments = []; + + if (!string.IsNullOrWhiteSpace(functionArguments)) + { + foreach (var argumentKvp in JsonSerializer.Deserialize>(functionArguments!)!) 
+ { + arguments[argumentKvp.Key] = argumentKvp.Value.ToString(); + } + } + + return (nameParts, arguments); + } + + private static ChatMessageContent GenerateFunctionCallContent(string agentName, IList functionCalls) + { + ChatMessageContent functionCallContent = new(AuthorRole.Assistant, content: null) + { + AuthorName = agentName + }; + + functionCallContent.Items.AddRange(functionCalls); + + return functionCallContent; + } + + private static ChatMessageContent GenerateFunctionResultContent(string agentName, IEnumerable functionResults, RunStep completedStep) + { + ChatMessageContent functionResultContent = new(AuthorRole.Tool, content: null) + { + AuthorName = agentName, + Metadata = GenerateToolCallMetadata(completedStep), + }; + + foreach (FunctionResultContent functionResult in functionResults) + { + functionResultContent.Items.Add( + new FunctionResultContent( + functionResult.FunctionName, + functionResult.PluginName, + functionResult.CallId, + functionResult.Result)); + } + + return functionResultContent; + } + + private static Dictionary GenerateToolCallMetadata(RunStep completedStep) + { + return new() + { + { nameof(RunStep.CreatedAt), completedStep.CreatedAt }, + { nameof(RunStep.AssistantId), completedStep.AssistantId }, + { nameof(RunStep.ThreadId), completedStep.ThreadId }, + { nameof(RunStep.RunId), completedStep.RunId }, + { nameof(RunStepDetailsUpdate.StepId), completedStep.Id }, + { nameof(RunStep.Usage), completedStep.Usage }, + }; + } + + private static ToolOutput[] GenerateToolOutputs(FunctionResultContent[] functionResults) + { + ToolOutput[] toolOutputs = new ToolOutput[functionResults.Length]; + + for (int index = 0; index < functionResults.Length; ++index) + { + FunctionResultContent functionResult = functionResults[index]; + + object resultValue = functionResult.Result ?? 
string.Empty; + + if (resultValue is not string textResult) + { + textResult = JsonSerializer.Serialize(resultValue); + } + + toolOutputs[index] = new ToolOutput(functionResult.CallId, textResult!); + } + + return toolOutputs; + } + + private static async Task RetrieveMessageAsync(AgentsClient client, string threadId, string messageId, TimeSpan syncDelay, CancellationToken cancellationToken) + { + ThreadMessage? message = null; + + bool retry = false; + int count = 0; + do + { + try + { + message = await client.GetMessageAsync(threadId, messageId, cancellationToken).ConfigureAwait(false); + } + catch (RequestFailedException exception) + { + // Step has provided the message-id. Retry on of NotFound/404 exists. + // Extremely rarely there might be a synchronization issue between the + // assistant response and message-service. + retry = exception.Status == (int)HttpStatusCode.NotFound && count < 3; + } + + if (retry) + { + await Task.Delay(syncDelay, cancellationToken).ConfigureAwait(false); + } + + ++count; + } + while (retry); + + return message; + } +} diff --git a/dotnet/src/Agents/AzureAI/Logging/AgentThreadActionsLogMessages.cs b/dotnet/src/Agents/AzureAI/Logging/AgentThreadActionsLogMessages.cs new file mode 100644 index 000000000000..974af70205eb --- /dev/null +++ b/dotnet/src/Agents/AzureAI/Logging/AgentThreadActionsLogMessages.cs @@ -0,0 +1,139 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.Diagnostics.CodeAnalysis; +using Azure.AI.Projects; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Agents.AzureAI.Internal; + +namespace Microsoft.SemanticKernel.Agents.AzureAI; + +#pragma warning disable SYSLIB1006 // Multiple logging methods cannot use the same event id within a class + +/// +/// Extensions for logging . +/// +/// +/// This extension uses the to +/// generate logging code at compile time to achieve optimized code. 
+/// +[ExcludeFromCodeCoverage] +internal static partial class AgentThreadActionsLogMessages +{ + /// + /// Logs creating run (started). + /// + [LoggerMessage( + EventId = 0, + Level = LogLevel.Debug, + Message = "[{MethodName}] Creating run for thread: {ThreadId}.")] + public static partial void LogAzureAIAgentCreatingRun( + this ILogger logger, + string methodName, + string threadId); + + /// + /// Logs created run (complete). + /// + [LoggerMessage( + EventId = 0, + Level = LogLevel.Information, + Message = "[{MethodName}] Created run for thread: {RunId}/{ThreadId}.")] + public static partial void LogAzureAIAgentCreatedRun( + this ILogger logger, + string methodName, + string runId, + string threadId); + + /// + /// Logs completed run (complete). + /// + [LoggerMessage( + EventId = 0, + Level = LogLevel.Information, + Message = "[{MethodName}] Completed run for thread: {RunId}/{ThreadId}.")] + public static partial void LogAzureAIAgentCompletedRun( + this ILogger logger, + string methodName, + string runId, + string threadId); + + /// + /// Logs processing run steps (started). + /// + [LoggerMessage( + EventId = 0, + Level = LogLevel.Debug, + Message = "[{MethodName}] Processing run steps for thread: {RunId}/{ThreadId}.")] + public static partial void LogAzureAIAgentProcessingRunSteps( + this ILogger logger, + string methodName, + string runId, + string threadId); + + /// + /// Logs processed run steps (complete). + /// + [LoggerMessage( + EventId = 0, + Level = LogLevel.Information, + Message = "[{MethodName}] Processed #{stepCount} run steps: {RunId}/{ThreadId}.")] + public static partial void LogAzureAIAgentProcessedRunSteps( + this ILogger logger, + string methodName, + int stepCount, + string runId, + string threadId); + + /// + /// Logs processing run messages (started). 
+ /// + [LoggerMessage( + EventId = 0, + Level = LogLevel.Debug, + Message = "[{MethodName}] Processing run messages for thread: {RunId}/{ThreadId}.")] + public static partial void LogAzureAIAgentProcessingRunMessages( + this ILogger logger, + string methodName, + string runId, + string threadId); + + /// + /// Logs processed run messages (complete). + /// + [LoggerMessage( + EventId = 0, + Level = LogLevel.Information, + Message = "[{MethodName}] Processed #{MessageCount} run steps: {RunId}/{ThreadId}.")] + public static partial void LogAzureAIAgentProcessedRunMessages( + this ILogger logger, + string methodName, + int messageCount, + string runId, + string threadId); + + /// + /// Logs polling run status (started). + /// + [LoggerMessage( + EventId = 0, + Level = LogLevel.Debug, + Message = "[{MethodName}] Polling run status for thread: {RunId}/{ThreadId}.")] + public static partial void LogAzureAIAgentPollingRunStatus( + this ILogger logger, + string methodName, + string runId, + string threadId); + + /// + /// Logs polled run status (complete). + /// + [LoggerMessage( + EventId = 0, + Level = LogLevel.Information, + Message = "[{MethodName}] Run status is {RunStatus}: {RunId}/{ThreadId}.")] + public static partial void LogAzureAIAgentPolledRunStatus( + this ILogger logger, + string methodName, + RunStatus runStatus, + string runId, + string threadId); +} diff --git a/dotnet/src/Agents/AzureAI/Logging/AzureAIAgentLogMessages.cs b/dotnet/src/Agents/AzureAI/Logging/AzureAIAgentLogMessages.cs new file mode 100644 index 000000000000..7056ddc746c0 --- /dev/null +++ b/dotnet/src/Agents/AzureAI/Logging/AzureAIAgentLogMessages.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft. All rights reserved. 
+using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.Logging; + +namespace Microsoft.SemanticKernel.Agents.AzureAI; + +#pragma warning disable SYSLIB1006 // Multiple logging methods cannot use the same event id within a class + +/// +/// Extensions for logging invocations. +/// +/// +/// This extension uses the to +/// generate logging code at compile time to achieve optimized code. +/// +[ExcludeFromCodeCoverage] +internal static partial class AzureAIAgentLogMessages +{ + /// + /// Logs creating channel (started). + /// + [LoggerMessage( + EventId = 0, + Level = LogLevel.Debug, + Message = "[{MethodName}] Creating assistant thread for {ChannelType}.")] + public static partial void LogAzureAIAgentCreatingChannel( + this ILogger logger, + string methodName, + string channelType); + + /// + /// Logs created channel (complete). + /// + [LoggerMessage( + EventId = 0, + Level = LogLevel.Information, + Message = "[{MethodName}] Created assistant thread for {ChannelType}: #{ThreadId}.")] + public static partial void LogAzureAIAgentCreatedChannel( + this ILogger logger, + string methodName, + string channelType, + string threadId); + + /// + /// Logs restoring serialized channel (started). + /// + [LoggerMessage( + EventId = 0, + Level = LogLevel.Debug, + Message = "[{MethodName}] Restoring assistant channel for {ChannelType}: #{ThreadId}.")] + public static partial void LogAzureAIAgentRestoringChannel( + this ILogger logger, + string methodName, + string channelType, + string threadId); + + /// + /// Logs restored serialized channel (complete). 
+ /// + [LoggerMessage( + EventId = 0, + Level = LogLevel.Information, + Message = "[{MethodName}] Restored assistant channel for {ChannelType}: #{ThreadId}.")] + public static partial void LogAzureAIAgentRestoredChannel( + this ILogger logger, + string methodName, + string channelType, + string threadId); +} diff --git a/dotnet/src/Agents/AzureAI/Properties/AssemblyInfo.cs b/dotnet/src/Agents/AzureAI/Properties/AssemblyInfo.cs new file mode 100644 index 000000000000..bd1c0f58314e --- /dev/null +++ b/dotnet/src/Agents/AzureAI/Properties/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. +[assembly: Experimental("SKEXP0110")] diff --git a/dotnet/src/Agents/AzureAI/RunPollingOptions.cs b/dotnet/src/Agents/AzureAI/RunPollingOptions.cs new file mode 100644 index 000000000000..7fcb76e5cab9 --- /dev/null +++ b/dotnet/src/Agents/AzureAI/RunPollingOptions.cs @@ -0,0 +1,73 @@ +// Copyright (c) Microsoft. All rights reserved. +using System; + +namespace Microsoft.SemanticKernel.Agents.AzureAI; + +/// +/// Configuration and defaults associated with polling behavior for Assistant API run processing. +/// +public sealed class RunPollingOptions +{ + /// + /// The default maximum number or retries when monitoring thread-run status. + /// + public static int DefaultMaximumRetryCount { get; } = 3; + + /// + /// The default polling interval when monitoring thread-run status. + /// + public static TimeSpan DefaultPollingInterval { get; } = TimeSpan.FromMilliseconds(500); + + /// + /// The default back-off interval when monitoring thread-run status. + /// + public static TimeSpan DefaultPollingBackoff { get; } = TimeSpan.FromSeconds(1); + + /// + /// The default number of polling iterations before using . 
+ /// + public static int DefaultPollingBackoffThreshold { get; } = 2; + + /// + /// The default polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag. + /// + public static TimeSpan DefaultMessageSynchronizationDelay { get; } = TimeSpan.FromMilliseconds(500); + + /// + /// The maximum retry count when polling thread-run status. + /// + /// + /// Only affects failures that have the potential to be transient. Explicit server error responses + /// will result in immediate failure. + /// + public int MaximumRetryCount { get; set; } = DefaultMaximumRetryCount; + + /// + /// The polling interval when monitoring thread-run status. + /// + public TimeSpan RunPollingInterval { get; set; } = DefaultPollingInterval; + + /// + /// The back-off interval when monitoring thread-run status. + /// + public TimeSpan RunPollingBackoff { get; set; } = DefaultPollingBackoff; + + /// + /// The number of polling iterations before using . + /// + public int RunPollingBackoffThreshold { get; set; } = DefaultPollingBackoffThreshold; + + /// + /// The polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag. + /// + public TimeSpan MessageSynchronizationDelay { get; set; } = DefaultMessageSynchronizationDelay; + + /// + /// Gets the polling interval for the specified iteration count. + /// + /// The number of polling iterations already attempted + public TimeSpan GetPollingInterval(int iterationCount) + { + return iterationCount > this.RunPollingBackoffThreshold ? this.RunPollingBackoff : this.RunPollingInterval; + } +} diff --git a/dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs b/dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs deleted file mode 100644 index d017fb403f23..000000000000 --- a/dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
-using Azure.Core; -using Azure.Core.Pipeline; - -namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal; - -/// -/// Helper class to inject headers into Azure SDK HTTP pipeline -/// -internal sealed class AddHeaderRequestPolicy(string headerName, string headerValue) : HttpPipelineSynchronousPolicy -{ - public override void OnSendingRequest(HttpMessage message) => message.Request.Headers.Add(headerName, headerValue); -} diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs index 2e066b91869f..c8434797bc50 100644 --- a/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs +++ b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs @@ -829,23 +829,6 @@ private static ChatMessageContent GenerateFunctionResultContent(string agentName }; } - private static Task[] ExecuteFunctionSteps(OpenAIAssistantAgent agent, FunctionCallContent[] functionCalls, CancellationToken cancellationToken) - { - Task[] functionTasks = new Task[functionCalls.Length]; - - for (int index = 0; index < functionCalls.Length; ++index) - { - functionTasks[index] = ExecuteFunctionStep(agent, functionCalls[index], cancellationToken); - } - - return functionTasks; - } - - private static Task ExecuteFunctionStep(OpenAIAssistantAgent agent, FunctionCallContent functionCall, CancellationToken cancellationToken) - { - return functionCall.InvokeAsync(agent.Kernel, cancellationToken); - } - private static ToolOutput[] GenerateToolOutputs(FunctionResultContent[] functionResults) { ToolOutput[] toolOutputs = new ToolOutput[functionResults.Length]; diff --git a/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj b/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj index b773878eb397..32d31f65c776 100644 --- a/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj +++ b/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj @@ -12,7 +12,10 @@ + + + @@ -23,8 +26,6 @@ runtime; build; native; contentfiles; analyzers; 
buildtransitive all - - @@ -32,6 +33,7 @@ + @@ -39,4 +41,8 @@ + + + + diff --git a/dotnet/src/Agents/UnitTests/AzureAI/AzureAIAssistantInvocationOptionsTests.cs b/dotnet/src/Agents/UnitTests/AzureAI/AzureAIAssistantInvocationOptionsTests.cs new file mode 100644 index 000000000000..5eb1ad98e687 --- /dev/null +++ b/dotnet/src/Agents/UnitTests/AzureAI/AzureAIAssistantInvocationOptionsTests.cs @@ -0,0 +1,113 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using System.Text.Json; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.AzureAI; +using Microsoft.SemanticKernel.ChatCompletion; +using SemanticKernel.Agents.UnitTests.Test; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.AzureAI; + +/// +/// Unit testing of . +/// +public class AzureAIAssistantInvocationOptionsTests +{ + /// + /// Verify initial state. + /// + [Fact] + public void OpenAIAssistantInvocationOptionsInitialState() + { + // Arrange + AzureAIInvocationOptions options = new(); + + // Assert + Assert.Null(options.ModelName); + Assert.Null(options.AdditionalInstructions); + Assert.Null(options.AdditionalMessages); + Assert.Null(options.Metadata); + Assert.Null(options.Temperature); + Assert.Null(options.TopP); + Assert.Null(options.ParallelToolCallsEnabled); + Assert.Null(options.MaxCompletionTokens); + Assert.Null(options.MaxPromptTokens); + Assert.Null(options.TruncationMessageCount); + Assert.Null(options.EnableJsonResponse); + Assert.False(options.EnableCodeInterpreter); + Assert.False(options.EnableFileSearch); + + // Act and Assert + ValidateSerialization(options); + } + + /// + /// Verify initialization. 
+ /// + [Fact] + public void OpenAIAssistantInvocationOptionsAssignment() + { + // Arrange + AzureAIInvocationOptions options = + new() + { + ModelName = "testmodel", + AdditionalInstructions = "test instructions", + AdditionalMessages = [ + new ChatMessageContent(AuthorRole.User, "test message") + ], + Metadata = new Dictionary() { { "a", "1" } }, + MaxCompletionTokens = 1000, + MaxPromptTokens = 1000, + ParallelToolCallsEnabled = false, + TruncationMessageCount = 12, + Temperature = 2, + TopP = 0, + EnableCodeInterpreter = true, + EnableJsonResponse = true, + EnableFileSearch = true, + }; + + // Assert + Assert.Equal("testmodel", options.ModelName); + Assert.Equal("test instructions", options.AdditionalInstructions); + Assert.Single(options.AdditionalMessages); + Assert.Equal(2, options.Temperature); + Assert.Equal(0, options.TopP); + Assert.Equal(1000, options.MaxCompletionTokens); + Assert.Equal(1000, options.MaxPromptTokens); + Assert.Equal(12, options.TruncationMessageCount); + Assert.False(options.ParallelToolCallsEnabled); + Assert.Single(options.Metadata); + Assert.True(options.EnableCodeInterpreter); + Assert.True(options.EnableJsonResponse); + Assert.True(options.EnableFileSearch); + + // Act and Assert + ValidateSerialization(options); + } + + private static void ValidateSerialization(AzureAIInvocationOptions source) + { + // Act + string json = JsonSerializer.Serialize(source); + + AzureAIInvocationOptions? 
target = JsonSerializer.Deserialize(json); + + // Assert + Assert.NotNull(target); + Assert.Equal(source.AdditionalInstructions, target.AdditionalInstructions); + Assert.Equivalent(source.AdditionalMessages, target.AdditionalMessages); + Assert.Equal(source.ModelName, target.ModelName); + Assert.Equal(source.Temperature, target.Temperature); + Assert.Equal(source.TopP, target.TopP); + Assert.Equal(source.MaxCompletionTokens, target.MaxCompletionTokens); + Assert.Equal(source.MaxPromptTokens, target.MaxPromptTokens); + Assert.Equal(source.TruncationMessageCount, target.TruncationMessageCount); + Assert.Equal(source.EnableCodeInterpreter, target.EnableCodeInterpreter); + Assert.Equal(source.EnableJsonResponse, target.EnableJsonResponse); + Assert.Equal(source.EnableFileSearch, target.EnableFileSearch); + AssertCollection.Equal(source.Metadata, target.Metadata); + } +} diff --git a/dotnet/src/Agents/UnitTests/AzureAI/AzureAIClientProviderTests.cs b/dotnet/src/Agents/UnitTests/AzureAI/AzureAIClientProviderTests.cs new file mode 100644 index 000000000000..96ddfb046896 --- /dev/null +++ b/dotnet/src/Agents/UnitTests/AzureAI/AzureAIClientProviderTests.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft. All rights reserved. +using System; +using System.Net.Http; +using Azure.AI.Projects; +using Azure.Identity; +using Microsoft.SemanticKernel.Agents.AzureAI; +using Moq; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.AzureAI; + +/// +/// Unit testing of . +/// +public class AzureAIClientProviderTests +{ + /// + /// Verify that provisioning of client for Azure OpenAI. + /// + [Fact] + public void VerifyAzureAIClientProviderDefault() + { + // Act + AzureAIClientProvider provider = AzureAIClientProvider.FromConnectionString("test;test;test;test", new AzureCliCredential()); + + // Assert + Assert.NotNull(provider.Client); + } + + /// + /// Verify that the factory can create a client with http proxy. 
+ /// + [Fact] + public void VerifyAzureAIClientProviderWithHttpClient() + { + // Arrange + using HttpClient httpClient = new() { BaseAddress = new Uri("http://myproxy:9819") }; + + // Act + AzureAIClientProvider provider = AzureAIClientProvider.FromConnectionString("test;test;test;test", new AzureCliCredential(), httpClient); + + // Assert + Assert.NotNull(provider.Client); + + // Arrange + using HttpClient httpClientWithHeaders = new() { BaseAddress = new Uri("http://myproxy:9819") }; + httpClientWithHeaders.DefaultRequestHeaders.Add("X-Test", "Test"); + + // Act + AzureAIClientProvider providerWithHeaders = AzureAIClientProvider.FromConnectionString("test;test;test;test", new AzureCliCredential(), httpClientWithHeaders); + + // Assert + Assert.NotNull(providerWithHeaders.Client); + + Assert.NotEqual(provider.ConfigurationKeys.Count, providerWithHeaders.ConfigurationKeys.Count); + } + + /// + /// Verify that the factory can accept an client that already exists. + /// + [Fact] + public void VerifyAzureAIClientProviderFromClient() + { + // Arrange + Mock mockClient = new(); + AzureAIClientProvider provider = AzureAIClientProvider.FromClient(mockClient.Object); + + // Assert + Assert.NotNull(provider.Client); + Assert.Equal(mockClient.Object, provider.Client); + } +} diff --git a/dotnet/src/Agents/UnitTests/AzureAI/Extensions/KernelFunctionExtensionsTests.cs b/dotnet/src/Agents/UnitTests/AzureAI/Extensions/KernelFunctionExtensionsTests.cs new file mode 100644 index 000000000000..cb8fe8415b97 --- /dev/null +++ b/dotnet/src/Agents/UnitTests/AzureAI/Extensions/KernelFunctionExtensionsTests.cs @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft. All rights reserved. +using System; +using System.ComponentModel; +using Azure.AI.Projects; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.AzureAI; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.AzureAI.Extensions; + +/// +/// Unit testing of . 
+/// +public class KernelFunctionExtensionsTests +{ + /// + /// Verify conversion from to . + /// + [Fact] + public void VerifyKernelFunctionToFunctionTool() + { + // Arrange + KernelPlugin plugin = KernelPluginFactory.CreateFromType(); + + // Assert + Assert.Equal(2, plugin.FunctionCount); + + // Arrange + KernelFunction f1 = plugin[nameof(TestPlugin.TestFunction1)]; + KernelFunction f2 = plugin[nameof(TestPlugin.TestFunction2)]; + + // Act + FunctionToolDefinition definition1 = f1.ToToolDefinition("testplugin"); + + // Assert + Assert.StartsWith($"testplugin-{nameof(TestPlugin.TestFunction1)}", definition1.Name, StringComparison.Ordinal); + Assert.Equal("test description", definition1.Description); + + // Act + FunctionToolDefinition definition2 = f2.ToToolDefinition("testplugin"); + + // Assert + Assert.StartsWith($"testplugin-{nameof(TestPlugin.TestFunction2)}", definition2.Name, StringComparison.Ordinal); + Assert.Equal("test description", definition2.Description); + } + + /// + /// Exists only for parsing. + /// +#pragma warning disable CA1812 // Avoid uninstantiated internal classes + private sealed class TestPlugin() +#pragma warning restore CA1812 // Avoid uninstantiated internal classes + { + [KernelFunction] + [Description("test description")] + public void TestFunction1() { } + + [KernelFunction] + [Description("test description")] +#pragma warning disable IDE0060 // Unused parameter for mock kernel function + public void TestFunction2(string p1, bool p2, int p3, string[] p4, ConsoleColor p5, DateTime p6) { } +#pragma warning restore IDE0060 // Unused parameter + } +} diff --git a/dotnet/src/Agents/UnitTests/AzureAI/Internal/AgentMessageFactoryTests.cs b/dotnet/src/Agents/UnitTests/AzureAI/Internal/AgentMessageFactoryTests.cs new file mode 100644 index 000000000000..c42de6fc38a2 --- /dev/null +++ b/dotnet/src/Agents/UnitTests/AzureAI/Internal/AgentMessageFactoryTests.cs @@ -0,0 +1,112 @@ +// Copyright (c) Microsoft. All rights reserved. 
+using System; +using System.Linq; +using Azure.AI.Projects; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.AzureAI.Internal; +using Microsoft.SemanticKernel.ChatCompletion; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.AzureAI.Internal; + +/// +/// Unit testing of . +/// +public class AgentMessageFactoryTests +{ + /// + /// Verify options creation. + /// + [Fact] + public void VerifyAssistantMessageAdapterGetMessageContentsWithText() + { + // Arrange + ChatMessageContent message = new(AuthorRole.User, items: [new TextContent("test")]); + + // Act + ThreadMessageOptions[] contents = AgentMessageFactory.GetThreadMessages([message]).ToArray(); + + // Assert + Assert.NotNull(contents); + Assert.Single(contents); + Assert.NotNull(contents[0].Content); + } + + /// + /// Verify options creation. + /// + [Fact] + public void VerifyAssistantMessageAdapterGetMessageWithImageUrl() + { + // Arrange + ChatMessageContent message = new(AuthorRole.User, items: [new ImageContent(new Uri("https://localhost/myimage.png"))]); + + // Act + ThreadMessageOptions[] contents = AgentMessageFactory.GetThreadMessages([message]).ToArray(); + + // Assert + Assert.NotNull(contents); + Assert.Empty(contents); + } + + /// + /// Verify options creation. + /// + [Fact] + public void VerifyAssistantMessageAdapterGetMessageWithImageData() + { + // Arrange + ChatMessageContent message = new(AuthorRole.User, items: [new ImageContent(new byte[] { 1, 2, 3 }, "image/png") { DataUri = "data:image/png;base64,MTIz" }]); + + // Act + ThreadMessageOptions[] contents = AgentMessageFactory.GetThreadMessages([message]).ToArray(); + + // Assert + Assert.NotNull(contents); + Assert.Empty(contents); + } + + /// + /// Verify options creation. 
+ /// + [Fact] + public void VerifyAssistantMessageAdapterGetMessageWithImageFile() + { + // Arrange + ChatMessageContent message = new(AuthorRole.User, items: [new FileReferenceContent("file-id")]); + + // Act + ThreadMessageOptions[] contents = AgentMessageFactory.GetThreadMessages([message]).ToArray(); + + // Assert + Assert.NotNull(contents); + Assert.Empty(contents); + } + + /// + /// Verify options creation. + /// + [Fact] + public void VerifyAssistantMessageAdapterGetMessageWithAll() + { + // Arrange + ChatMessageContent message = + new( + AuthorRole.User, + items: + [ + new TextContent("test"), + new ImageContent(new Uri("https://localhost/myimage.png")), + new FileReferenceContent("file-id") + ]); + + // Act + ThreadMessageOptions[] contents = AgentMessageFactory.GetThreadMessages([message]).ToArray(); + + // Assert + Assert.NotNull(contents); + Assert.Single(contents); + Assert.NotNull(contents[0].Content); + Assert.Single(contents[0].Attachments); + } +} diff --git a/dotnet/src/Agents/UnitTests/AzureAI/RunPollingOptionsTests.cs b/dotnet/src/Agents/UnitTests/AzureAI/RunPollingOptionsTests.cs new file mode 100644 index 000000000000..9d1054ac9bb6 --- /dev/null +++ b/dotnet/src/Agents/UnitTests/AzureAI/RunPollingOptionsTests.cs @@ -0,0 +1,71 @@ +// Copyright (c) Microsoft. All rights reserved. +using System; +using Microsoft.SemanticKernel.Agents.AzureAI; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.AzureAI; + +/// +/// Unit testing of . +/// +public class RunPollingOptionsTests +{ + /// + /// Verify initial state. 
+ /// + [Fact] + public void RunPollingOptionsInitialStateTest() + { + // Arrange + RunPollingOptions options = new(); + + // Assert + Assert.Equal(RunPollingOptions.DefaultPollingInterval, options.RunPollingInterval); + Assert.Equal(RunPollingOptions.DefaultPollingBackoff, options.RunPollingBackoff); + Assert.Equal(RunPollingOptions.DefaultMessageSynchronizationDelay, options.MessageSynchronizationDelay); + Assert.Equal(RunPollingOptions.DefaultPollingBackoffThreshold, options.RunPollingBackoffThreshold); + } + + /// + /// Verify initialization. + /// + [Fact] + public void RunPollingOptionsAssignmentTest() + { + // Arrange + RunPollingOptions options = + new() + { + RunPollingInterval = TimeSpan.FromSeconds(3), + RunPollingBackoff = TimeSpan.FromSeconds(4), + RunPollingBackoffThreshold = 8, + MessageSynchronizationDelay = TimeSpan.FromSeconds(5), + }; + + // Assert + Assert.Equal(3, options.RunPollingInterval.TotalSeconds); + Assert.Equal(4, options.RunPollingBackoff.TotalSeconds); + Assert.Equal(5, options.MessageSynchronizationDelay.TotalSeconds); + Assert.Equal(8, options.RunPollingBackoffThreshold); + } + + /// + /// Verify polling interval selection relative to the back-off threshold. + /// + [Fact] + public void RunPollingOptionsGetIntervalTest() + { + // Arrange + RunPollingOptions options = + new() + { + RunPollingInterval = TimeSpan.FromSeconds(3), + RunPollingBackoff = TimeSpan.FromSeconds(4), + RunPollingBackoffThreshold = 8, + }; + + // Assert + Assert.Equal(options.RunPollingInterval, options.GetPollingInterval(8)); + Assert.Equal(options.RunPollingBackoff, options.GetPollingInterval(9)); + } +} diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs deleted file mode 100644 index 6288c6a5aed8..000000000000 --- a/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
-using System.Linq; -using Azure.Core; -using Azure.Core.Pipeline; -using Microsoft.SemanticKernel.Agents.OpenAI.Internal; -using Xunit; - -namespace SemanticKernel.Agents.UnitTests.OpenAI.Azure; - -/// -/// Unit testing of . -/// -public class AddHeaderRequestPolicyTests -{ - /// - /// Verify behavior of . - /// - [Fact] - public void VerifyAddHeaderRequestPolicyExecution() - { - // Arrange - using HttpClientTransport clientTransport = new(); - HttpPipeline pipeline = new(clientTransport); - - HttpMessage message = pipeline.CreateMessage(); - AddHeaderRequestPolicy policy = new(headerName: "testname", headerValue: "testvalue"); - - // Act - policy.OnSendingRequest(message); - - // Assert - Assert.Single(message.Request.Headers); - HttpHeader header = message.Request.Headers.Single(); - Assert.Equal("testname", header.Name); - Assert.Equal("testvalue", header.Value); - } -} diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs index b0131ac9be6b..f8778a4f2900 100644 --- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs +++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs @@ -2,6 +2,7 @@ using System.Collections.Generic; using System.Text.Json; using Microsoft.SemanticKernel.Agents.OpenAI; +using SemanticKernel.Agents.UnitTests.Test; using Xunit; namespace SemanticKernel.Agents.UnitTests.OpenAI; diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs index 4962a9c04797..0a71201e7626 100644 --- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs +++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs @@ -4,6 +4,7 @@ using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; +using 
SemanticKernel.Agents.UnitTests.Test; using Xunit; namespace SemanticKernel.Agents.UnitTests.OpenAI; diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs index 6217e1f38395..410b93b3f03b 100644 --- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs +++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs @@ -91,10 +91,10 @@ public void VerifyOpenAIClientProviderWithHttpClient() // Arrange using HttpClient httpClientWithHeaders = new() { BaseAddress = new Uri("http://myproxy:9819") }; - httpClient.DefaultRequestHeaders.Add("X-Test", "Test"); + httpClientWithHeaders.DefaultRequestHeaders.Add("X-Test", "Test"); // Act - OpenAIClientProvider providerWithHeaders = OpenAIClientProvider.ForOpenAI(httpClient: httpClient); + OpenAIClientProvider providerWithHeaders = OpenAIClientProvider.ForOpenAI(httpClient: httpClientWithHeaders); // Assert Assert.NotNull(providerWithHeaders.Client); diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIThreadCreationOptionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIThreadCreationOptionsTests.cs index 1689bec1f828..c4b8abca4baf 100644 --- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIThreadCreationOptionsTests.cs +++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIThreadCreationOptionsTests.cs @@ -4,6 +4,7 @@ using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; +using SemanticKernel.Agents.UnitTests.Test; using Xunit; namespace SemanticKernel.Agents.UnitTests.OpenAI; diff --git a/dotnet/src/Agents/UnitTests/OpenAI/AssertCollection.cs b/dotnet/src/Agents/UnitTests/Test/AssertCollection.cs similarity index 95% rename from dotnet/src/Agents/UnitTests/OpenAI/AssertCollection.cs rename to dotnet/src/Agents/UnitTests/Test/AssertCollection.cs index cd51c736ac18..8a89cba994a2 100644 --- a/dotnet/src/Agents/UnitTests/OpenAI/AssertCollection.cs +++ 
b/dotnet/src/Agents/UnitTests/Test/AssertCollection.cs @@ -3,7 +3,7 @@ using System.Collections.Generic; using Xunit; -namespace SemanticKernel.Agents.UnitTests.OpenAI; +namespace SemanticKernel.Agents.UnitTests.Test; internal static class AssertCollection { diff --git a/dotnet/src/InternalUtilities/azure/AzureAIUtilities.props b/dotnet/src/InternalUtilities/azure/AzureAIUtilities.props new file mode 100644 index 000000000000..323196e5564b --- /dev/null +++ b/dotnet/src/InternalUtilities/azure/AzureAIUtilities.props @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/dotnet/src/InternalUtilities/azure/Policies/GeneratedActionPipelinePolicy.cs b/dotnet/src/InternalUtilities/azure/Policies/GeneratedActionPipelinePolicy.cs new file mode 100644 index 000000000000..8f412aa9e930 --- /dev/null +++ b/dotnet/src/InternalUtilities/azure/Policies/GeneratedActionPipelinePolicy.cs @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Core.Pipeline; + +/// +/// Generic action pipeline policy for processing messages. 
+/// +[ExcludeFromCodeCoverage] +internal sealed class GenericActionPipelinePolicy : HttpPipelinePolicy +{ + private readonly Action _processMessageAction; + + internal GenericActionPipelinePolicy(Action processMessageAction) + { + this._processMessageAction = processMessageAction; + } + + public override void Process(HttpMessage message, ReadOnlyMemory pipeline) + { + this._processMessageAction(message); + } + + public override ValueTask ProcessAsync(HttpMessage message, ReadOnlyMemory pipeline) + { + this._processMessageAction(message); + return new ValueTask(Task.CompletedTask); // .NET STD 2.0 compatibility + } +} diff --git a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs index 989005333946..1792192d6b90 100644 --- a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs +++ b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs @@ -3,8 +3,11 @@ using System.ClientModel; using System.Collections.ObjectModel; using System.Diagnostics; +using Azure.AI.Projects; using Azure.Identity; using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.AzureAI; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; using OpenAI.Assistants; @@ -22,6 +25,8 @@ public abstract class BaseAgentsTest(ITestOutputHelper output) : BaseTest(output /// protected const string AssistantSampleMetadataKey = "sksample"; + protected override bool ForceOpenAI => true; + /// /// Metadata to indicate the assistant as created for a sample. /// @@ -36,16 +41,26 @@ public abstract class BaseAgentsTest(ITestOutputHelper output) : BaseTest(output { AssistantSampleMetadataKey, bool.TrueString } }); + /// + /// Provide a according to the configuration settings. 
+ /// + protected AzureAIClientProvider GetAzureProvider() + { + return AzureAIClientProvider.FromConnectionString(TestConfiguration.AzureAI.ConnectionString, new AzureCliCredential()); + } + /// /// Provide a according to the configuration settings. /// protected OpenAIClientProvider GetClientProvider() - => + { + return this.UseOpenAIConfig ? OpenAIClientProvider.ForOpenAI(new ApiKeyCredential(this.ApiKey ?? throw new ConfigurationNotFoundException("OpenAI:ApiKey"))) : !string.IsNullOrWhiteSpace(this.ApiKey) ? OpenAIClientProvider.ForAzureOpenAI(new ApiKeyCredential(this.ApiKey), new Uri(this.Endpoint!)) : OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(this.Endpoint!)); + } /// /// Common method to write formatted agent chat content to the console. @@ -91,13 +106,17 @@ protected void WriteAgentChatMessage(ChatMessageContent message) { WriteUsage(assistantUsage.TotalTokenCount, assistantUsage.InputTokenCount, assistantUsage.OutputTokenCount); } + else if (usage is RunStepCompletionUsage agentUsage) + { + WriteUsage(agentUsage.TotalTokens, agentUsage.PromptTokens, agentUsage.CompletionTokens); + } else if (usage is ChatTokenUsage chatUsage) { WriteUsage(chatUsage.TotalTokenCount, chatUsage.InputTokenCount, chatUsage.OutputTokenCount); } } - void WriteUsage(int totalTokens, int inputTokens, int outputTokens) + void WriteUsage(long totalTokens, long inputTokens, long outputTokens) { Console.WriteLine($" [Usage] Tokens: {totalTokens}, Input: {inputTokens}, Output: {outputTokens}"); } diff --git a/dotnet/src/InternalUtilities/samples/InternalUtilities/TestConfiguration.cs b/dotnet/src/InternalUtilities/samples/InternalUtilities/TestConfiguration.cs index 5e9e0c925660..6ca016248073 100644 --- a/dotnet/src/InternalUtilities/samples/InternalUtilities/TestConfiguration.cs +++ b/dotnet/src/InternalUtilities/samples/InternalUtilities/TestConfiguration.cs @@ -24,6 +24,7 @@ public static void Initialize(IConfigurationRoot configRoot) public static 
OnnxConfig Onnx => LoadSection(); public static AzureOpenAIConfig AzureOpenAI => LoadSection(); public static AzureAIInferenceConfig AzureAIInference => LoadSection(); + public static AzureAIConfig AzureAI => LoadSection(); public static AzureOpenAIConfig AzureOpenAIImages => LoadSection(); public static AzureOpenAIEmbeddingsConfig AzureOpenAIEmbeddings => LoadSection(); public static AzureAISearchConfig AzureAISearch => LoadSection(); @@ -91,6 +92,12 @@ public class OnnxConfig public string EmbeddingVocabPath { get; set; } } + public class AzureAIConfig + { + public string ConnectionString { get; set; } + public string ChatModelId { get; set; } + } + public class AzureOpenAIConfig { public string ServiceId { get; set; } diff --git a/dotnet/src/SemanticKernel.Abstractions/CompatibilitySuppressions.xml b/dotnet/src/SemanticKernel.Abstractions/CompatibilitySuppressions.xml new file mode 100644 index 000000000000..710187694735 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/CompatibilitySuppressions.xml @@ -0,0 +1,32 @@ + + + + + CP0001 + T:Microsoft.SemanticKernel.Agents.OpenAI.AnnotationContent + lib/net8.0/Microsoft.SemanticKernel.Abstractions.dll + lib/net8.0/Microsoft.SemanticKernel.Abstractions.dll + true + + + CP0001 + T:Microsoft.SemanticKernel.Agents.OpenAI.StreamingAnnotationContent + lib/net8.0/Microsoft.SemanticKernel.Abstractions.dll + lib/net8.0/Microsoft.SemanticKernel.Abstractions.dll + true + + + CP0001 + T:Microsoft.SemanticKernel.Agents.OpenAI.AnnotationContent + lib/netstandard2.0/Microsoft.SemanticKernel.Abstractions.dll + lib/netstandard2.0/Microsoft.SemanticKernel.Abstractions.dll + true + + + CP0001 + T:Microsoft.SemanticKernel.Agents.OpenAI.StreamingAnnotationContent + lib/netstandard2.0/Microsoft.SemanticKernel.Abstractions.dll + lib/netstandard2.0/Microsoft.SemanticKernel.Abstractions.dll + true + + \ No newline at end of file diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs 
b/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs index f751ea6fc448..f0e71963fc80 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs @@ -3,7 +3,7 @@ using System.Diagnostics.CodeAnalysis; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Agents.OpenAI; +namespace Microsoft.SemanticKernel.Agents; /// /// Content type to support message annotations. diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/FileReferenceContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/FileReferenceContent.cs index 925d74d0c731..641e375b2839 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/FileReferenceContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/FileReferenceContent.cs @@ -16,6 +16,15 @@ public class FileReferenceContent : KernelContent /// public string FileId { get; init; } = string.Empty; + /// + /// An optional tool association. + /// + /// + /// Tool definition depends upon the context within which the content is consumed. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyList? Tools { get; init; } + /// /// Initializes a new instance of the class. 
/// diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/KernelContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/KernelContent.cs index 8dbcc00eb25d..525472d90047 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/KernelContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/KernelContent.cs @@ -2,7 +2,7 @@ using System.Collections.Generic; using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.Agents; namespace Microsoft.SemanticKernel; diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingAnnotationContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingAnnotationContent.cs index 609f94a87180..5c5aa5780303 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingAnnotationContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingAnnotationContent.cs @@ -4,7 +4,7 @@ using System.Text; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Agents.OpenAI; +namespace Microsoft.SemanticKernel.Agents; /// /// Content type to support message annotations. diff --git a/dotnet/src/SemanticKernel.UnitTests/Contents/AnnotationContentTests.cs b/dotnet/src/SemanticKernel.UnitTests/Contents/AnnotationContentTests.cs index 524caed4ff29..dac3d2c5abc6 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Contents/AnnotationContentTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Contents/AnnotationContentTests.cs @@ -1,5 +1,5 @@ // Copyright (c) Microsoft. All rights reserved. 
-using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.Agents; using Xunit; namespace SemanticKernel.UnitTests.Contents; diff --git a/dotnet/src/SemanticKernel.UnitTests/Contents/ChatMessageContentTests.cs b/dotnet/src/SemanticKernel.UnitTests/Contents/ChatMessageContentTests.cs index cd753a15e201..1bc89f9aa5fa 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Contents/ChatMessageContentTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Contents/ChatMessageContentTests.cs @@ -6,7 +6,7 @@ using System.Text; using System.Text.Json; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.ChatCompletion; using Xunit; @@ -202,6 +202,7 @@ public void ItCanBeSerializeAndDeserialized() new FunctionCallContent("function-name", "plugin-name", "function-id", new KernelArguments { ["parameter"] = "argument" }), new FunctionResultContent(new FunctionCallContent("function-name", "plugin-name", "function-id"), "function-result"), new FileReferenceContent(fileId: "file-id-1") { ModelId = "model-7", Metadata = new Dictionary() { ["metadata-key-7"] = "metadata-value-7" } }, + new FileReferenceContent(fileId: "file-id-2") { Tools = ["a", "b", "c"] }, new AnnotationContent("quote-8") { ModelId = "model-8", FileId = "file-id-2", StartIndex = 2, EndIndex = 24, Metadata = new Dictionary() { ["metadata-key-8"] = "metadata-value-8" } }, ]; @@ -301,15 +302,21 @@ public void ItCanBeSerializeAndDeserialized() Assert.Equal("function-id", functionResultContent.CallId); Assert.Equal("plugin-name", functionResultContent.PluginName); - var fileReferenceContent = deserializedMessage.Items[8] as FileReferenceContent; - Assert.NotNull(fileReferenceContent); - Assert.Equal("file-id-1", fileReferenceContent.FileId); - Assert.Equal("model-7", fileReferenceContent.ModelId); - Assert.NotNull(fileReferenceContent.Metadata); - Assert.Single(fileReferenceContent.Metadata); - 
Assert.Equal("metadata-value-7", fileReferenceContent.Metadata["metadata-key-7"]?.ToString()); - - var annotationContent = deserializedMessage.Items[9] as AnnotationContent; + var fileReferenceContent1 = deserializedMessage.Items[8] as FileReferenceContent; + Assert.NotNull(fileReferenceContent1); + Assert.Equal("file-id-1", fileReferenceContent1.FileId); + Assert.Equal("model-7", fileReferenceContent1.ModelId); + Assert.NotNull(fileReferenceContent1.Metadata); + Assert.Single(fileReferenceContent1.Metadata); + Assert.Equal("metadata-value-7", fileReferenceContent1.Metadata["metadata-key-7"]?.ToString()); + + var fileReferenceContent2 = deserializedMessage.Items[9] as FileReferenceContent; + Assert.NotNull(fileReferenceContent2); + Assert.Equal("file-id-2", fileReferenceContent2.FileId); + Assert.NotNull(fileReferenceContent2.Tools); + Assert.Equal(3, fileReferenceContent2.Tools.Count); + + var annotationContent = deserializedMessage.Items[10] as AnnotationContent; Assert.NotNull(annotationContent); Assert.Equal("file-id-2", annotationContent.FileId); Assert.Equal("quote-8", annotationContent.Quote); diff --git a/dotnet/src/SemanticKernel.UnitTests/Contents/FileReferenceContentTests.cs b/dotnet/src/SemanticKernel.UnitTests/Contents/FileReferenceContentTests.cs index 6b55818c9473..b698fa528bff 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Contents/FileReferenceContentTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Contents/FileReferenceContentTests.cs @@ -21,6 +21,7 @@ public void VerifyFileReferenceContentInitialState() Assert.Empty(definition.FileId); } + /// /// Verify usage. /// @@ -30,5 +31,20 @@ public void VerifyFileReferenceContentUsage() FileReferenceContent definition = new(fileId: "testfile"); Assert.Equal("testfile", definition.FileId); + Assert.Null(definition.Tools); + } + + /// + /// Verify usage. 
+ /// + [Fact] + public void VerifyFileReferenceToolUsage() + { + FileReferenceContent definition = new(fileId: "testfile") { Tools = new[] { "a", "b", "c" } }; + + Assert.Equal("testfile", definition.FileId); + Assert.NotNull(definition.Tools); + Assert.Equal(3, definition.Tools.Count); + Assert.Equivalent(new[] { "a", "b", "c" }, definition.Tools); } } diff --git a/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingAnnotationContentTests.cs b/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingAnnotationContentTests.cs index eb954752ce4b..46da513e4a7c 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingAnnotationContentTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingAnnotationContentTests.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. using System.Text; -using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.Agents; using Xunit; namespace SemanticKernel.UnitTests.Contents;