From a5570b180ea042e6ef2b45001688fb3bdbf4f4d1 Mon Sep 17 00:00:00 2001
From: Chris <66376200+crickman@users.noreply.github.com>
Date: Mon, 23 Sep 2024 08:01:37 -0700
Subject: [PATCH] .Net Agents - Support Prompt Template (#8631)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
### Motivation and Context
Support ability to parameterize agent instructions (à la
`IKernelTemplate`).
This also includes the ability to initialize an agent using a _yaml_
based prompt-template.
### Description
**Core:**
- Update `ChatCompletionAgent` and `OpenAIAssistantAgent` to support
initialization via `PromptTemplateConfig`.
- Also allow both agent types to support _semantic-kernel_ template
format for string based `Instructions`
- Formalize templating contracts on `KernelAgent` (base class)
- Added _GettingStartedWithAgents_ samples using _yaml_ template:
_Step01_ & _Step08_
- Added templating samples under `Concepts/Agents` to explore full range
of templating patterns
**Structural:**
- Split off `OpenAIAssistantCapabilities` from
`OpenAIAssistantDefinition` to clarify creation via
`PromptTemplateConfig`
- Re-ordered method parameters for `OpenAIAssistant.CreateAsync` and
`RetrieveAsync` to rationalize functional grouping and optionality.
- Externalized internal `AssistantCreationOptionsFactory` (from private
`OpenAIAssistant` method) for clarity and ease of testing
- Persisting _template-format_ as part of assistant metadata for
retrieval case (in the event of any ambiguity)
**Additionally:**
- Updated/added comments where appropriate
- Updated sample conventions (argument labels, explicit types)
- Updated all call sites for creating `OpenAIAssistantAgent` (due to
parameter ordering)
- Added test coverage where able
### Contribution Checklist
- [X] The code builds clean without any errors or warnings
- [X] The PR follows the [SK Contribution
Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md)
and the [pre-submission formatting
script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts)
raises no violations
- [X] All unit tests pass, and I have added new tests where possible
- [X] I didn't break anyone :smile:
---------
Co-authored-by: Evan Mattson <35585003+moonbox3@users.noreply.github.com>
Co-authored-by: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com>
---
.../Agents/ChatCompletion_ServiceSelection.cs | 2 +-
.../Agents/ChatCompletion_Templating.cs | 132 +++
.../Concepts/Agents/MixedChat_Agents.cs | 6 +-
.../Concepts/Agents/MixedChat_Files.cs | 6 +-
.../Concepts/Agents/MixedChat_Images.cs | 6 +-
.../Concepts/Agents/MixedChat_Reset.cs | 6 +-
.../Concepts/Agents/MixedChat_Streaming.cs | 6 +-
.../Agents/OpenAIAssistant_ChartMaker.cs | 6 +-
.../OpenAIAssistant_FileManipulation.cs | 6 +-
.../Agents/OpenAIAssistant_Streaming.cs | 8 +-
.../Agents/OpenAIAssistant_Templating.cs | 142 +++
.../GettingStartedWithAgents.csproj | 3 +-
.../Resources/GenerateStory.yaml | 17 +
.../GettingStartedWithAgents/Step01_Agent.cs | 48 +
.../Step06_DependencyInjection.cs | 2 +-
.../Step08_Assistant.cs | 62 +-
.../Step09_Assistant_Vision.cs | 6 +-
.../Step10_AssistantTool_CodeInterpreter.cs | 6 +-
.../Step11_AssistantTool_FileSearch.cs | 6 +-
dotnet/src/Agents/Abstractions/KernelAgent.cs | 46 +-
dotnet/src/Agents/Core/ChatCompletionAgent.cs | 44 +-
.../src/Agents/Core/ChatHistoryKernelAgent.cs | 29 +-
.../AssistantCreationOptionsFactory.cs | 77 ++
.../Internal/AssistantRunOptionsFactory.cs | 4 +-
.../OpenAI/Internal/AssistantThreadActions.cs | 52 +-
.../src/Agents/OpenAI/OpenAIAssistantAgent.cs | 158 +--
.../OpenAI/OpenAIAssistantCapabilities.cs | 94 ++
.../Agents/OpenAI/OpenAIAssistantChannel.cs | 2 +
.../OpenAI/OpenAIAssistantDefinition.cs | 90 +-
.../src/Agents/OpenAI/OpenAIClientProvider.cs | 2 +-
.../AssistantRunOptionsFactoryTests.cs | 10 +-
.../OpenAI/OpenAIAssistantAgentTests.cs | 908 ++++++------------
.../OpenAI/OpenAIAssistantDefinitionTests.cs | 22 +
.../OpenAI/OpenAIAssistantResponseContent.cs | 741 ++++++++++++++
.../OpenAI/OpenAIClientProviderTests.cs | 65 +-
.../Agents/MixedAgentTests.cs | 4 +-
.../Agents/OpenAIAssistantAgentTests.cs | 8 +-
37 files changed, 1949 insertions(+), 883 deletions(-)
create mode 100644 dotnet/samples/Concepts/Agents/ChatCompletion_Templating.cs
create mode 100644 dotnet/samples/Concepts/Agents/OpenAIAssistant_Templating.cs
create mode 100644 dotnet/samples/GettingStartedWithAgents/Resources/GenerateStory.yaml
create mode 100644 dotnet/src/Agents/OpenAI/Internal/AssistantCreationOptionsFactory.cs
create mode 100644 dotnet/src/Agents/OpenAI/OpenAIAssistantCapabilities.cs
create mode 100644 dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantResponseContent.cs
diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_ServiceSelection.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_ServiceSelection.cs
index 82b2ca28bce0..8921dd2a6f9e 100644
--- a/dotnet/samples/Concepts/Agents/ChatCompletion_ServiceSelection.cs
+++ b/dotnet/samples/Concepts/Agents/ChatCompletion_ServiceSelection.cs
@@ -8,7 +8,7 @@ namespace Agents;
///
/// Demonstrate service selection for through setting service-id
-/// on and also providing override
+/// on and also providing override
/// when calling
///
public class ChatCompletion_ServiceSelection(ITestOutputHelper output) : BaseTest(output)
diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_Templating.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_Templating.cs
new file mode 100644
index 000000000000..1bcf2adbe758
--- /dev/null
+++ b/dotnet/samples/Concepts/Agents/ChatCompletion_Templating.cs
@@ -0,0 +1,132 @@
+// Copyright (c) Microsoft. All rights reserved.
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.PromptTemplates.Handlebars;
+using Microsoft.SemanticKernel.PromptTemplates.Liquid;
+
+namespace Agents;
+
+///
+/// Demonstrate parameterized template instruction for .
+///
+public class ChatCompletion_Templating(ITestOutputHelper output) : BaseAgentsTest(output)
+{
+ private readonly static (string Input, string? Style)[] s_inputs =
+ [
+ (Input: "Home cooking is great.", Style: null),
+ (Input: "Talk about world peace.", Style: "iambic pentameter"),
+ (Input: "Say something about doing your best.", Style: "e. e. cummings"),
+ (Input: "What do you think about having fun?", Style: "old school rap")
+ ];
+
+ [Fact]
+ public async Task InvokeAgentWithInstructionsTemplateAsync()
+ {
+ // Instruction based template always processed by KernelPromptTemplateFactory
+ ChatCompletionAgent agent =
+ new()
+ {
+ Kernel = this.CreateKernelWithChatCompletion(),
+ Instructions =
+ """
+ Write a one verse poem on the requested topic in the style of {{$style}}.
+ Always state the requested style of the poem.
+ """,
+ Arguments = new KernelArguments()
+ {
+ {"style", "haiku"}
+ }
+ };
+
+ await InvokeChatCompletionAgentWithTemplateAsync(agent);
+ }
+
+ [Fact]
+ public async Task InvokeAgentWithKernelTemplateAsync()
+ {
+ // Default factory is KernelPromptTemplateFactory
+ await InvokeChatCompletionAgentWithTemplateAsync(
+ """
+ Write a one verse poem on the requested topic in the style of {{$style}}.
+ Always state the requested style of the poem.
+ """);
+ }
+
+ [Fact]
+ public async Task InvokeAgentWithHandlebarsTemplateAsync()
+ {
+ await InvokeChatCompletionAgentWithTemplateAsync(
+ """
+ Write a one verse poem on the requested topic in the style of {{style}}.
+ Always state the requested style of the poem.
+ """,
+ HandlebarsPromptTemplateFactory.HandlebarsTemplateFormat,
+ new HandlebarsPromptTemplateFactory());
+ }
+
+ [Fact]
+ public async Task InvokeAgentWithLiquidTemplateAsync()
+ {
+ await InvokeChatCompletionAgentWithTemplateAsync(
+ """
+ Write a one verse poem on the requested topic in the style of {{style}}.
+ Always state the requested style of the poem.
+ """,
+ LiquidPromptTemplateFactory.LiquidTemplateFormat,
+ new LiquidPromptTemplateFactory());
+ }
+
+ private async Task InvokeChatCompletionAgentWithTemplateAsync(
+ string instructionTemplate,
+ string? templateFormat = null,
+ IPromptTemplateFactory? templateFactory = null)
+ {
+ // Define the agent
+ PromptTemplateConfig templateConfig =
+ new()
+ {
+ Template = instructionTemplate,
+ TemplateFormat = templateFormat,
+ };
+ ChatCompletionAgent agent =
+ new(templateConfig, templateFactory)
+ {
+ Kernel = this.CreateKernelWithChatCompletion(),
+ Arguments = new KernelArguments()
+ {
+ {"style", "haiku"}
+ }
+ };
+
+ await InvokeChatCompletionAgentWithTemplateAsync(agent);
+ }
+
+ private async Task InvokeChatCompletionAgentWithTemplateAsync(ChatCompletionAgent agent)
+ {
+ ChatHistory chat = [];
+
+ foreach ((string input, string? style) in s_inputs)
+ {
+ // Add input to chat
+ ChatMessageContent request = new(AuthorRole.User, input);
+ chat.Add(request);
+ this.WriteAgentChatMessage(request);
+
+ KernelArguments? arguments = null;
+
+ if (!string.IsNullOrWhiteSpace(style))
+ {
+ // Override style template parameter
+ arguments = new() { { "style", style } };
+ }
+
+ // Process agent response
+ await foreach (ChatMessageContent message in agent.InvokeAsync(chat, arguments))
+ {
+ chat.Add(message);
+ this.WriteAgentChatMessage(message);
+ }
+ }
+ }
+}
diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs b/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs
index 21b19c1d342c..159441147f77 100644
--- a/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs
+++ b/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs
@@ -46,14 +46,14 @@ public async Task ChatWithOpenAIAssistantAgentAndChatCompletionAgentAsync()
OpenAIAssistantAgent agentWriter =
await OpenAIAssistantAgent.CreateAsync(
- kernel: new(),
clientProvider: this.GetClientProvider(),
- definition: new(this.Model)
+ definition: new OpenAIAssistantDefinition(this.Model)
{
Instructions = CopyWriterInstructions,
Name = CopyWriterName,
Metadata = AssistantSampleMetadata,
- });
+ },
+ kernel: new Kernel());
// Create a chat for agent interaction.
AgentGroupChat chat =
diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Files.cs b/dotnet/samples/Concepts/Agents/MixedChat_Files.cs
index 0219c25f7712..d35428ac3df4 100644
--- a/dotnet/samples/Concepts/Agents/MixedChat_Files.cs
+++ b/dotnet/samples/Concepts/Agents/MixedChat_Files.cs
@@ -34,14 +34,14 @@ await fileClient.UploadFileAsync(
// Define the agents
OpenAIAssistantAgent analystAgent =
await OpenAIAssistantAgent.CreateAsync(
- kernel: new(),
provider,
- new(this.Model)
+ definition: new OpenAIAssistantDefinition(this.Model)
{
EnableCodeInterpreter = true,
CodeInterpreterFileIds = [uploadFile.Id], // Associate uploaded file with assistant code-interpreter
Metadata = AssistantSampleMetadata,
- });
+ },
+ kernel: new Kernel());
ChatCompletionAgent summaryAgent =
new()
diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Images.cs b/dotnet/samples/Concepts/Agents/MixedChat_Images.cs
index 437643e25574..35d040727e4a 100644
--- a/dotnet/samples/Concepts/Agents/MixedChat_Images.cs
+++ b/dotnet/samples/Concepts/Agents/MixedChat_Images.cs
@@ -29,15 +29,15 @@ public async Task AnalyzeDataAndGenerateChartAsync()
// Define the agents
OpenAIAssistantAgent analystAgent =
await OpenAIAssistantAgent.CreateAsync(
- kernel: new(),
provider,
- new(this.Model)
+ definition: new OpenAIAssistantDefinition(this.Model)
{
Instructions = AnalystInstructions,
Name = AnalystName,
EnableCodeInterpreter = true,
Metadata = AssistantSampleMetadata,
- });
+ },
+ kernel: new Kernel());
ChatCompletionAgent summaryAgent =
new()
diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs b/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs
index f9afcc55b7f5..7c9a2490d3e0 100644
--- a/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs
+++ b/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs
@@ -25,13 +25,13 @@ public async Task ResetChatAsync()
// Define the agents
OpenAIAssistantAgent assistantAgent =
await OpenAIAssistantAgent.CreateAsync(
- kernel: new(),
provider,
- new(this.Model)
+ definition: new OpenAIAssistantDefinition(this.Model)
{
Name = nameof(OpenAIAssistantAgent),
Instructions = AgentInstructions,
- });
+ },
+ kernel: new Kernel());
ChatCompletionAgent chatAgent =
new()
diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Streaming.cs b/dotnet/samples/Concepts/Agents/MixedChat_Streaming.cs
index de88a242ee03..c9364bc2b2a9 100644
--- a/dotnet/samples/Concepts/Agents/MixedChat_Streaming.cs
+++ b/dotnet/samples/Concepts/Agents/MixedChat_Streaming.cs
@@ -47,14 +47,14 @@ public async Task UseStreamingAgentChatAsync()
OpenAIAssistantAgent agentWriter =
await OpenAIAssistantAgent.CreateAsync(
- kernel: new(),
clientProvider: this.GetClientProvider(),
- definition: new(this.Model)
+ definition: new OpenAIAssistantDefinition(this.Model)
{
Instructions = CopyWriterInstructions,
Name = CopyWriterName,
Metadata = AssistantSampleMetadata,
- });
+ },
+ kernel: new Kernel());
// Create a chat for agent interaction.
AgentGroupChat chat =
diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs
index cd81f7c4d187..76126e75a41e 100644
--- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs
+++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs
@@ -26,15 +26,15 @@ public async Task GenerateChartWithOpenAIAssistantAgentAsync()
// Define the agent
OpenAIAssistantAgent agent =
await OpenAIAssistantAgent.CreateAsync(
- kernel: new(),
provider,
- new(this.Model)
+ definition: new OpenAIAssistantDefinition(this.Model)
{
Instructions = AgentInstructions,
Name = AgentName,
EnableCodeInterpreter = true,
Metadata = AssistantSampleMetadata,
- });
+ },
+ kernel: new());
// Create a chat for agent interaction.
AgentGroupChat chat = new();
diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs
index dc4af2ad2743..c5324c6aa7ed 100644
--- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs
+++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs
@@ -29,14 +29,14 @@ await fileClient.UploadFileAsync(
// Define the agent
OpenAIAssistantAgent agent =
await OpenAIAssistantAgent.CreateAsync(
- kernel: new(),
provider,
- new(this.Model)
+ definition: new OpenAIAssistantDefinition(this.Model)
{
EnableCodeInterpreter = true,
CodeInterpreterFileIds = [uploadFile.Id],
Metadata = AssistantSampleMetadata,
- });
+ },
+ kernel: new Kernel());
// Create a chat for agent interaction.
AgentGroupChat chat = new();
diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Streaming.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Streaming.cs
index d3a757187429..127aa8ba5657 100644
--- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Streaming.cs
+++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Streaming.cs
@@ -15,14 +15,14 @@ public class OpenAIAssistant_Streaming(ITestOutputHelper output) : BaseAgentsTes
private const string ParrotInstructions = "Repeat the user message in the voice of a pirate and then end with a parrot sound.";
[Fact]
- public async Task UseStreamingChatCompletionAgentAsync()
+ public async Task UseStreamingAssistantAgentAsync()
{
// Define the agent
OpenAIAssistantAgent agent =
await OpenAIAssistantAgent.CreateAsync(
kernel: new(),
clientProvider: this.GetClientProvider(),
- new(this.Model)
+ definition: new OpenAIAssistantDefinition(this.Model)
{
Instructions = ParrotInstructions,
Name = ParrotName,
@@ -42,7 +42,7 @@ await OpenAIAssistantAgent.CreateAsync(
}
[Fact]
- public async Task UseStreamingChatCompletionAgentWithPluginAsync()
+ public async Task UseStreamingAssistantAgentWithPluginAsync()
{
const string MenuInstructions = "Answer questions about the menu.";
@@ -51,7 +51,7 @@ public async Task UseStreamingChatCompletionAgentWithPluginAsync()
await OpenAIAssistantAgent.CreateAsync(
kernel: new(),
clientProvider: this.GetClientProvider(),
- new(this.Model)
+ definition: new OpenAIAssistantDefinition(this.Model)
{
Instructions = MenuInstructions,
Name = "Host",
diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Templating.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Templating.cs
new file mode 100644
index 000000000000..3937635203a4
--- /dev/null
+++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Templating.cs
@@ -0,0 +1,142 @@
+// Copyright (c) Microsoft. All rights reserved.
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.PromptTemplates.Handlebars;
+using Microsoft.SemanticKernel.PromptTemplates.Liquid;
+
+namespace Agents;
+
+///
+/// Demonstrate parameterized template instruction for .
+///
+public class OpenAIAssistant_Templating(ITestOutputHelper output) : BaseAgentsTest(output)
+{
+ private readonly static (string Input, string? Style)[] s_inputs =
+ [
+ (Input: "Home cooking is great.", Style: null),
+ (Input: "Talk about world peace.", Style: "iambic pentameter"),
+ (Input: "Say something about doing your best.", Style: "e. e. cummings"),
+ (Input: "What do you think about having fun?", Style: "old school rap")
+ ];
+
+ [Fact]
+ public async Task InvokeAgentWithInstructionsAsync()
+ {
+        // Instruction based template always processed by KernelPromptTemplateFactory
+ OpenAIAssistantAgent agent = await OpenAIAssistantAgent.CreateAsync(
+ clientProvider: this.GetClientProvider(),
+ definition: new OpenAIAssistantDefinition(this.Model)
+ {
+ Instructions =
+ """
+ Write a one verse poem on the requested topic in the styles of {{$style}}.
+ Always state the requested style of the poem.
+ """,
+ Metadata = AssistantSampleMetadata
+ },
+ kernel: new Kernel(),
+ defaultArguments: new KernelArguments()
+ {
+ {"style", "haiku"}
+ });
+
+ await InvokeAssistantAgentWithTemplateAsync(agent);
+ }
+
+ [Fact]
+ public async Task InvokeAgentWithKernelTemplateAsync()
+ {
+ // Default factory is KernelPromptTemplateFactory
+ await InvokeAssistantAgentWithTemplateAsync(
+ """
+ Write a one verse poem on the requested topic in the styles of {{$style}}.
+ Always state the requested style of the poem.
+ """);
+ }
+
+ [Fact]
+ public async Task InvokeAgentWithHandlebarsTemplateAsync()
+ {
+ await InvokeAssistantAgentWithTemplateAsync(
+ """
+ Write a one verse poem on the requested topic in the styles of {{style}}.
+ Always state the requested style of the poem.
+ """,
+ HandlebarsPromptTemplateFactory.HandlebarsTemplateFormat,
+ new HandlebarsPromptTemplateFactory());
+ }
+
+ [Fact]
+ public async Task InvokeAgentWithLiquidTemplateAsync()
+ {
+ await InvokeAssistantAgentWithTemplateAsync(
+ """
+ Write a one verse poem on the requested topic in the styles of {{style}}.
+ Always state the requested style of the poem.
+ """,
+ LiquidPromptTemplateFactory.LiquidTemplateFormat,
+ new LiquidPromptTemplateFactory());
+ }
+
+ private async Task InvokeAssistantAgentWithTemplateAsync(
+ string instructionTemplate,
+ string? templateFormat = null,
+ IPromptTemplateFactory? templateFactory = null)
+ {
+ // Define the agent
+ OpenAIAssistantAgent agent = await OpenAIAssistantAgent.CreateFromTemplateAsync(
+ clientProvider: this.GetClientProvider(),
+ capabilities: new OpenAIAssistantCapabilities(this.Model)
+ {
+ Metadata = AssistantSampleMetadata
+ },
+ kernel: new Kernel(),
+ defaultArguments: new KernelArguments()
+ {
+ {"style", "haiku"}
+ },
+ templateConfig: new PromptTemplateConfig
+ {
+ Template = instructionTemplate,
+ TemplateFormat = templateFormat,
+ },
+ templateFactory);
+
+ await InvokeAssistantAgentWithTemplateAsync(agent);
+ }
+
+ private async Task InvokeAssistantAgentWithTemplateAsync(OpenAIAssistantAgent agent)
+ {
+ // Create a thread for the agent conversation.
+ string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
+
+ try
+ {
+ // Respond to user input
+ foreach ((string input, string? style) in s_inputs)
+ {
+ ChatMessageContent request = new(AuthorRole.User, input);
+ await agent.AddChatMessageAsync(threadId, request);
+ this.WriteAgentChatMessage(request);
+
+ KernelArguments? arguments = null;
+
+ if (!string.IsNullOrWhiteSpace(style))
+ {
+ arguments = new() { { "style", style } };
+ }
+
+ await foreach (ChatMessageContent message in agent.InvokeAsync(threadId, arguments))
+ {
+ this.WriteAgentChatMessage(message);
+ }
+ }
+ }
+ finally
+ {
+ await agent.DeleteThreadAsync(threadId);
+ await agent.DeleteAsync();
+ }
+ }
+}
diff --git a/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj b/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj
index df9e025b678f..23b3aa7989a4 100644
--- a/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj
+++ b/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj
@@ -44,6 +44,7 @@
+
@@ -58,7 +59,7 @@
-
+
diff --git a/dotnet/samples/GettingStartedWithAgents/Resources/GenerateStory.yaml b/dotnet/samples/GettingStartedWithAgents/Resources/GenerateStory.yaml
new file mode 100644
index 000000000000..fc5ecd88f34e
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/Resources/GenerateStory.yaml
@@ -0,0 +1,17 @@
+name: GenerateStory
+template: |
+ Tell a story about {{$topic}} that is {{$length}} sentences long.
+template_format: semantic-kernel
+description: A function that generates a story about a topic.
+input_variables:
+ - name: topic
+ description: The topic of the story.
+ is_required: true
+ - name: length
+ description: The number of sentences in the story.
+ is_required: true
+output_variable:
+ description: The generated story.
+execution_settings:
+ default:
+ temperature: 0.6
diff --git a/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs b/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs
index bc5bee5249e5..dfd6aeb22fb3 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs
@@ -2,6 +2,7 @@
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.ChatCompletion;
+using Resources;
namespace GettingStarted;
@@ -17,6 +18,8 @@ public class Step01_Agent(ITestOutputHelper output) : BaseAgentsTest(output)
[Fact]
public async Task UseSingleChatCompletionAgentAsync()
{
+ Kernel kernel = this.CreateKernelWithChatCompletion();
+
// Define the agent
ChatCompletionAgent agent =
new()
@@ -49,4 +52,49 @@ async Task InvokeAgentAsync(string input)
}
}
}
+
+ [Fact]
+ public async Task UseTemplateForChatCompletionAgentAsync()
+ {
+ // Define the agent
+ string generateStoryYaml = EmbeddedResource.Read("GenerateStory.yaml");
+ PromptTemplateConfig templateConfig = KernelFunctionYaml.ToPromptTemplateConfig(generateStoryYaml);
+
+ // Instructions, Name and Description properties defined via the config.
+ ChatCompletionAgent agent =
+ new(templateConfig, new KernelPromptTemplateFactory())
+ {
+ Kernel = this.CreateKernelWithChatCompletion(),
+ Arguments = new KernelArguments()
+ {
+ { "topic", "Dog" },
+ { "length", "3" },
+ }
+ };
+
+ /// Create the chat history to capture the agent interaction.
+ ChatHistory chat = [];
+
+ // Invoke the agent with the default arguments.
+ await InvokeAgentAsync();
+
+ // Invoke the agent with the override arguments.
+ await InvokeAgentAsync(
+ new()
+ {
+ { "topic", "Cat" },
+ { "length", "3" },
+ });
+
+ // Local function to invoke agent and display the conversation messages.
+ async Task InvokeAgentAsync(KernelArguments? arguments = null)
+ {
+ await foreach (ChatMessageContent content in agent.InvokeAsync(chat, arguments))
+ {
+ chat.Add(content);
+
+ WriteAgentChatMessage(content);
+ }
+ }
+ }
}
diff --git a/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs b/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs
index ca8089a9130f..5beb969bf090 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs
@@ -58,7 +58,7 @@ public async Task UseDependencyInjectionToCreateAgentAsync()
{
Instructions = TutorInstructions,
Name = TutorName,
- Kernel = sp.GetRequiredService(),
+ Kernel = sp.GetRequiredService().Clone(),
});
// Create a service provider for resolving registered services
diff --git a/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs b/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs
index ba4ab065c2a6..32c03a40a638 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs
@@ -4,6 +4,7 @@
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
+using Resources;
namespace GettingStarted;
@@ -22,14 +23,14 @@ public async Task UseSingleAssistantAgentAsync()
// Define the agent
OpenAIAssistantAgent agent =
await OpenAIAssistantAgent.CreateAsync(
- kernel: new(),
clientProvider: this.GetClientProvider(),
- new(this.Model)
+ definition: new OpenAIAssistantDefinition(this.Model)
{
Instructions = HostInstructions,
Name = HostName,
Metadata = AssistantSampleMetadata,
- });
+ },
+ kernel: new Kernel());
// Initialize plugin and add to the agent's Kernel (same as direct Kernel usage).
KernelPlugin plugin = KernelPluginFactory.CreateFromType();
@@ -66,6 +67,61 @@ async Task InvokeAgentAsync(string input)
}
}
+ [Fact]
+ public async Task UseTemplateForAssistantAgentAsync()
+ {
+ // Define the agent
+ string generateStoryYaml = EmbeddedResource.Read("GenerateStory.yaml");
+ PromptTemplateConfig templateConfig = KernelFunctionYaml.ToPromptTemplateConfig(generateStoryYaml);
+
+ // Instructions, Name and Description properties defined via the config.
+ OpenAIAssistantAgent agent =
+ await OpenAIAssistantAgent.CreateFromTemplateAsync(
+ clientProvider: this.GetClientProvider(),
+ capabilities: new OpenAIAssistantCapabilities(this.Model)
+ {
+ Metadata = AssistantSampleMetadata,
+ },
+ kernel: new Kernel(),
+ defaultArguments: new KernelArguments()
+ {
+ { "topic", "Dog" },
+ { "length", "3" },
+ },
+ templateConfig);
+
+ // Create a thread for the agent conversation.
+ string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
+
+ try
+ {
+ // Invoke the agent with the default arguments.
+ await InvokeAgentAsync();
+
+ // Invoke the agent with the override arguments.
+ await InvokeAgentAsync(
+ new()
+ {
+ { "topic", "Cat" },
+ { "length", "3" },
+ });
+ }
+ finally
+ {
+ await agent.DeleteThreadAsync(threadId);
+ await agent.DeleteAsync();
+ }
+
+ // Local function to invoke agent and display the response.
+ async Task InvokeAgentAsync(KernelArguments? arguments = null)
+ {
+ await foreach (ChatMessageContent response in agent.InvokeAsync(threadId, arguments))
+ {
+ WriteAgentChatMessage(response);
+ }
+ }
+ }
+
private sealed class MenuPlugin
{
[KernelFunction, Description("Provides a list of specials from the menu.")]
diff --git a/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs b/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs
index 62845f2c4366..37983444cb40 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs
@@ -23,12 +23,12 @@ public async Task UseSingleAssistantAgentAsync()
OpenAIClientProvider provider = this.GetClientProvider();
OpenAIAssistantAgent agent =
await OpenAIAssistantAgent.CreateAsync(
- kernel: new(),
provider,
- new(this.Model)
+ definition: new OpenAIAssistantDefinition(this.Model)
{
Metadata = AssistantSampleMetadata,
- });
+ },
+ kernel: new Kernel());
// Upload an image
await using Stream imageStream = EmbeddedResource.ReadStream("cat.jpg")!;
diff --git a/dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter.cs b/dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter.cs
index 1205771d66be..203009ffb561 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter.cs
@@ -16,13 +16,13 @@ public async Task UseCodeInterpreterToolWithAssistantAgentAsync()
// Define the agent
OpenAIAssistantAgent agent =
await OpenAIAssistantAgent.CreateAsync(
- kernel: new(),
clientProvider: this.GetClientProvider(),
- new(this.Model)
+ definition: new(this.Model)
{
EnableCodeInterpreter = true,
Metadata = AssistantSampleMetadata,
- });
+ },
+ kernel: new Kernel());
// Create a thread for the agent conversation.
string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
diff --git a/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs b/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs
index 581cfd355995..543b3df99165 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs
@@ -20,13 +20,13 @@ public async Task UseFileSearchToolWithAssistantAgentAsync()
OpenAIClientProvider provider = this.GetClientProvider();
OpenAIAssistantAgent agent =
await OpenAIAssistantAgent.CreateAsync(
- kernel: new(),
clientProvider: this.GetClientProvider(),
- new(this.Model)
+ definition: new OpenAIAssistantDefinition(this.Model)
{
EnableFileSearch = true,
Metadata = AssistantSampleMetadata,
- });
+ },
+ kernel: new Kernel());
// Upload file - Using a table of fictional employees.
FileClient fileClient = provider.Client.GetFileClient();
diff --git a/dotnet/src/Agents/Abstractions/KernelAgent.cs b/dotnet/src/Agents/Abstractions/KernelAgent.cs
index 1df425972495..c747a6a30df6 100644
--- a/dotnet/src/Agents/Abstractions/KernelAgent.cs
+++ b/dotnet/src/Agents/Abstractions/KernelAgent.cs
@@ -1,4 +1,7 @@
// Copyright (c) Microsoft. All rights reserved.
+using System.Threading;
+using System.Threading.Tasks;
+
namespace Microsoft.SemanticKernel.Agents;
///
@@ -7,8 +10,20 @@ namespace Microsoft.SemanticKernel.Agents;
public abstract class KernelAgent : Agent
{
///
- /// The instructions of the agent (optional)
+ /// Arguments for the agent instruction parameters (optional).
+ ///
+ ///
+ /// Also includes .
+ ///
+ public KernelArguments? Arguments { get; init; }
+
+ ///
+ /// The instructions for the agent (optional)
///
+ ///
+ /// Instructions may be formatted in "semantic-kernel" template format.
+ /// ()
+ ///
public string? Instructions { get; init; }
///
@@ -18,4 +33,33 @@ public abstract class KernelAgent : Agent
/// Defaults to empty Kernel, but may be overridden.
///
public Kernel Kernel { get; init; } = new();
+
+ ///
+ /// A prompt-template based on the agent instructions.
+ ///
+ protected IPromptTemplate? Template { get; set; }
+
+ ///
+ /// Format the system instructions for the agent.
+ ///
+ /// The containing services, plugins, and other state for use by the agent.
+ /// Optional arguments to pass to the agents's invocation, including any .
+ /// The to monitor for cancellation requests. The default is .
+ /// The formatted system instructions for the agent
+ protected async Task FormatInstructionsAsync(Kernel kernel, KernelArguments? arguments, CancellationToken cancellationToken)
+ {
+ // If is not set, default instructions may be treated as "semantic-kernel" template.
+ if (this.Template == null)
+ {
+ if (string.IsNullOrWhiteSpace(this.Instructions))
+ {
+ return null;
+ }
+
+ KernelPromptTemplateFactory templateFactory = new(this.LoggerFactory);
+ this.Template = templateFactory.Create(new PromptTemplateConfig(this.Instructions!));
+ }
+
+ return await this.Template.RenderAsync(kernel, arguments, cancellationToken).ConfigureAwait(false);
+ }
}
diff --git a/dotnet/src/Agents/Core/ChatCompletionAgent.cs b/dotnet/src/Agents/Core/ChatCompletionAgent.cs
index 37982a17613c..4e54ce228079 100644
--- a/dotnet/src/Agents/Core/ChatCompletionAgent.cs
+++ b/dotnet/src/Agents/Core/ChatCompletionAgent.cs
@@ -13,11 +13,35 @@ namespace Microsoft.SemanticKernel.Agents;
/// A specialization based on .
///
///
-/// NOTE: Enable OpenAIPromptExecutionSettings.FunctionChoiceBehavior for agent plugins.
-/// ()
+/// NOTE: Enable for agent plugins.
+/// ()
///
public sealed class ChatCompletionAgent : ChatHistoryKernelAgent
{
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ public ChatCompletionAgent() { }
+
+ ///
+ /// Initializes a new instance of the class from
+ /// a .
+ ///
+ /// Prompt template configuration
+ /// An optional factory to produce the for the agent
+ ///
+ /// When 'templateFactory' parameter is not provided, the default is used.
+ ///
+ public ChatCompletionAgent(
+ PromptTemplateConfig templateConfig,
+ IPromptTemplateFactory? templateFactory = null)
+ {
+ this.Name = templateConfig.Name;
+ this.Description = templateConfig.Description;
+ this.Instructions = templateConfig.Template;
+ this.Template = templateFactory?.Create(templateConfig);
+ }
+
///
public override async IAsyncEnumerable InvokeAsync(
ChatHistory history,
@@ -30,7 +54,7 @@ public override async IAsyncEnumerable InvokeAsync(
(IChatCompletionService chatCompletionService, PromptExecutionSettings? executionSettings) = GetChatCompletionService(kernel, arguments);
- ChatHistory chat = this.SetupAgentChatHistory(history);
+ ChatHistory chat = await this.SetupAgentChatHistoryAsync(history, arguments, kernel, cancellationToken).ConfigureAwait(false);
int messageCount = chat.Count;
@@ -75,7 +99,7 @@ public override async IAsyncEnumerable InvokeStream
(IChatCompletionService chatCompletionService, PromptExecutionSettings? executionSettings) = GetChatCompletionService(kernel, arguments);
- ChatHistory chat = this.SetupAgentChatHistory(history);
+ ChatHistory chat = await this.SetupAgentChatHistoryAsync(history, arguments, kernel, cancellationToken).ConfigureAwait(false);
int messageCount = chat.Count;
@@ -133,13 +157,19 @@ internal static (IChatCompletionService service, PromptExecutionSettings? execut
return (chatCompletionService, executionSettings);
}
- private ChatHistory SetupAgentChatHistory(IReadOnlyList history)
+ private async Task SetupAgentChatHistoryAsync(
+ IReadOnlyList history,
+ KernelArguments? arguments,
+ Kernel kernel,
+ CancellationToken cancellationToken)
{
ChatHistory chat = [];
- if (!string.IsNullOrWhiteSpace(this.Instructions))
+ string? instructions = await this.FormatInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false);
+
+ if (!string.IsNullOrWhiteSpace(instructions))
{
- chat.Add(new ChatMessageContent(AuthorRole.System, this.Instructions) { AuthorName = this.Name });
+ chat.Add(new ChatMessageContent(AuthorRole.System, instructions) { AuthorName = this.Name });
}
chat.AddRange(history);
diff --git a/dotnet/src/Agents/Core/ChatHistoryKernelAgent.cs b/dotnet/src/Agents/Core/ChatHistoryKernelAgent.cs
index b7beae216f36..0eee62920027 100644
--- a/dotnet/src/Agents/Core/ChatHistoryKernelAgent.cs
+++ b/dotnet/src/Agents/Core/ChatHistoryKernelAgent.cs
@@ -13,16 +13,11 @@ namespace Microsoft.SemanticKernel.Agents;
/// A specialization bound to a .
///
///
-/// NOTE: Enable OpenAIPromptExecutionSettings.FunctionChoiceBehavior for agent plugins.
-/// ()
+/// NOTE: Enable for agent plugins.
+/// ()
///
public abstract class ChatHistoryKernelAgent : KernelAgent
{
- ///
- /// Optional arguments for the agent.
- ///
- public KernelArguments? Arguments { get; init; }
-
///
/// Optionally specify a to reduce the history.
///
@@ -32,14 +27,28 @@ public abstract class ChatHistoryKernelAgent : KernelAgent
///
public IChatHistoryReducer? HistoryReducer { get; init; }
- ///
+ ///
+ /// Invoke the assistant to respond to the provided history.
+ ///
+ /// The conversation history.
+ /// Optional arguments to pass to the agent's invocation, including any .
+ /// The containing services, plugins, and other state for use by the agent.
+ /// The to monitor for cancellation requests. The default is .
+ /// Asynchronous enumeration of response messages.
public abstract IAsyncEnumerable InvokeAsync(
ChatHistory history,
KernelArguments? arguments = null,
Kernel? kernel = null,
CancellationToken cancellationToken = default);
- ///
+ ///
+ /// Invoke the assistant to respond to the provided history with streaming response.
+ ///
+ /// The conversation history.
+ /// Optional arguments to pass to the agent's invocation, including any .
+ /// The containing services, plugins, and other state for use by the agent.
+ /// The to monitor for cancellation requests. The default is .
+ /// Asynchronous enumeration of response messages.
public abstract IAsyncEnumerable InvokeStreamingAsync(
ChatHistory history,
KernelArguments? arguments = null,
@@ -51,7 +60,7 @@ public abstract IAsyncEnumerable InvokeStreamingAsy
///
/// The source history
/// The to monitor for cancellation requests. The default is .
- ///
+ /// True if reduction has occurred.
public Task ReduceAsync(ChatHistory history, CancellationToken cancellationToken = default) =>
history.ReduceAsync(this.HistoryReducer, cancellationToken);
diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantCreationOptionsFactory.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantCreationOptionsFactory.cs
new file mode 100644
index 000000000000..532a8433c37c
--- /dev/null
+++ b/dotnet/src/Agents/OpenAI/Internal/AssistantCreationOptionsFactory.cs
@@ -0,0 +1,77 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using System.Text.Json;
+using OpenAI.Assistants;
+
+namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal;
+
+///
+/// Produce the for an assistant according to the requested configuration.
+///
+internal static class AssistantCreationOptionsFactory
+{
+ public static AssistantCreationOptions CreateAssistantOptions(this PromptTemplateConfig templateConfig, OpenAIAssistantCapabilities capabilities)
+ {
+ AssistantCreationOptions assistantCreationOptions = capabilities.CreateAssistantCreationOptions(templateConfig.TemplateFormat);
+
+ assistantCreationOptions.Name = templateConfig.Name;
+ assistantCreationOptions.Instructions = templateConfig.Template;
+ assistantCreationOptions.Description = templateConfig.Description;
+
+ return assistantCreationOptions;
+ }
+
+ public static AssistantCreationOptions CreateAssistantOptions(this OpenAIAssistantDefinition definition)
+ {
+ AssistantCreationOptions assistantCreationOptions = definition.CreateAssistantCreationOptions(PromptTemplateConfig.SemanticKernelTemplateFormat);
+
+ assistantCreationOptions.Name = definition.Name;
+ assistantCreationOptions.Instructions = definition.Instructions;
+ assistantCreationOptions.Description = definition.Description;
+
+ return assistantCreationOptions;
+ }
+
+ private static AssistantCreationOptions CreateAssistantCreationOptions(this OpenAIAssistantCapabilities definition, string templateFormat)
+ {
+ AssistantCreationOptions assistantCreationOptions =
+ new()
+ {
+ ToolResources =
+ AssistantToolResourcesFactory.GenerateToolResources(
+ definition.EnableFileSearch ? definition.VectorStoreId : null,
+ definition.EnableCodeInterpreter ? definition.CodeInterpreterFileIds : null),
+ ResponseFormat = definition.EnableJsonResponse ? AssistantResponseFormat.JsonObject : AssistantResponseFormat.Auto,
+ Temperature = definition.Temperature,
+ NucleusSamplingFactor = definition.TopP,
+ };
+
+ if (definition.Metadata != null)
+ {
+ foreach (KeyValuePair item in definition.Metadata)
+ {
+ assistantCreationOptions.Metadata[item.Key] = item.Value;
+ }
+ }
+
+ assistantCreationOptions.Metadata[OpenAIAssistantAgent.TemplateMetadataKey] = templateFormat;
+
+ if (definition.ExecutionOptions != null)
+ {
+ string optionsJson = JsonSerializer.Serialize(definition.ExecutionOptions);
+ assistantCreationOptions.Metadata[OpenAIAssistantAgent.OptionsMetadataKey] = optionsJson;
+ }
+
+ if (definition.EnableCodeInterpreter)
+ {
+ assistantCreationOptions.Tools.Add(ToolDefinition.CreateCodeInterpreter());
+ }
+
+ if (definition.EnableFileSearch)
+ {
+ assistantCreationOptions.Tools.Add(ToolDefinition.CreateFileSearch());
+ }
+
+ return assistantCreationOptions;
+ }
+}
diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs
index 9cef36da3fa3..94580ea8fc79 100644
--- a/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs
+++ b/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs
@@ -16,8 +16,9 @@ internal static class AssistantRunOptionsFactory
/// Produce by reconciling and .
///
/// The assistant definition
+ /// Instructions to use for the run
/// The run specific options
- public static RunCreationOptions GenerateOptions(OpenAIAssistantDefinition definition, OpenAIAssistantInvocationOptions? invocationOptions)
+ public static RunCreationOptions GenerateOptions(OpenAIAssistantDefinition definition, string? overrideInstructions, OpenAIAssistantInvocationOptions? invocationOptions)
{
int? truncationMessageCount = ResolveExecutionSetting(invocationOptions?.TruncationMessageCount, definition.ExecutionOptions?.TruncationMessageCount);
@@ -25,6 +26,7 @@ public static RunCreationOptions GenerateOptions(OpenAIAssistantDefinition defin
new()
{
AdditionalInstructions = invocationOptions?.AdditionalInstructions ?? definition.ExecutionOptions?.AdditionalInstructions,
+ InstructionsOverride = overrideInstructions,
MaxCompletionTokens = ResolveExecutionSetting(invocationOptions?.MaxCompletionTokens, definition.ExecutionOptions?.MaxCompletionTokens),
MaxPromptTokens = ResolveExecutionSetting(invocationOptions?.MaxPromptTokens, definition.ExecutionOptions?.MaxPromptTokens),
ModelOverride = invocationOptions?.ModelName,
diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs
index 5eea56fddfc2..423b2f5cc98c 100644
--- a/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs
+++ b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs
@@ -57,6 +57,7 @@ public static async Task CreateThreadAsync(AssistantClient client, OpenA
ThreadInitializationMessage threadMessage = new(
role: message.Role == AuthorRole.User ? MessageRole.User : MessageRole.Assistant,
content: AssistantMessageFactory.GetMessageContents(message));
+
createOptions.InitialMessages.Add(threadMessage);
}
}
@@ -153,9 +154,6 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist
/// Optional arguments to pass to the agents's invocation, including any .
/// The to monitor for cancellation requests. The default is .
/// Asynchronous enumeration of messages.
- ///
- /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
- ///
public static async IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync(
OpenAIAssistantAgent agent,
AssistantClient client,
@@ -175,7 +173,9 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist
ToolDefinition[]? tools = [.. agent.Tools, .. kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name)))];
- RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(agent.Definition, invocationOptions);
+ string? instructions = await agent.GetInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false);
+
+ RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(agent.Definition, instructions, invocationOptions);
options.ToolsOverride.AddRange(tools);
@@ -185,7 +185,7 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist
// Evaluate status and process steps and messages, as encountered.
HashSet processedStepIds = [];
- Dictionary functionSteps = [];
+ Dictionary functionSteps = [];
do
{
@@ -207,11 +207,6 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist
// Execute functions in parallel and post results at once.
FunctionCallContent[] functionCalls = steps.SelectMany(step => ParseFunctionStep(agent, step)).ToArray();
- // Capture function-call for message processing
- foreach (FunctionCallContent functionCall in functionCalls)
- {
- functionSteps.Add(functionCall.Id!, functionCall);
- }
if (functionCalls.Length > 0)
{
// Emit function-call content
@@ -223,6 +218,12 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist
// Block for function results
FunctionResultContent[] functionResults = await Task.WhenAll(functionResultTasks).ConfigureAwait(false);
+ // Capture function-call for message processing
+ foreach (FunctionResultContent functionCall in functionResults)
+ {
+ functionSteps.Add(functionCall.CallId!, functionCall);
+ }
+
// Process tool output
ToolOutput[] toolOutputs = GenerateToolOutputs(functionResults);
@@ -259,8 +260,8 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist
// Process function result content
else if (toolCall.ToolKind == RunStepToolCallKind.Function)
{
- FunctionCallContent functionStep = functionSteps[toolCall.ToolCallId]; // Function step always captured on invocation
- content = GenerateFunctionResultContent(agent.GetName(), functionStep, toolCall.FunctionOutput);
+ FunctionResultContent functionStep = functionSteps[toolCall.ToolCallId]; // Function step always captured on invocation
+ content = GenerateFunctionResultContent(agent.GetName(), [functionStep]);
}
if (content is not null)
@@ -364,7 +365,9 @@ public static async IAsyncEnumerable InvokeStreamin
ToolDefinition[]? tools = [.. agent.Tools, .. kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name)))];
- RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(agent.Definition, invocationOptions);
+ string? instructions = await agent.GetInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false);
+
+ RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(agent.Definition, instructions, invocationOptions);
options.ToolsOverride.AddRange(tools);
@@ -425,7 +428,7 @@ public static async IAsyncEnumerable InvokeStreamin
ToolOutput[] toolOutputs = GenerateToolOutputs(functionResults);
asyncUpdates = client.SubmitToolOutputsToRunStreamingAsync(run, toolOutputs);
- messages?.Add(GenerateFunctionResultsContent(agent.GetName(), functionResults));
+ messages?.Add(GenerateFunctionResultContent(agent.GetName(), functionResults));
}
}
@@ -631,24 +634,7 @@ private static ChatMessageContent GenerateFunctionCallContent(string agentName,
return functionCallContent;
}
- private static ChatMessageContent GenerateFunctionResultContent(string agentName, FunctionCallContent functionCall, string result)
- {
- ChatMessageContent functionCallContent = new(AuthorRole.Tool, content: null)
- {
- AuthorName = agentName
- };
-
- functionCallContent.Items.Add(
- new FunctionResultContent(
- functionCall.FunctionName,
- functionCall.PluginName,
- functionCall.Id,
- result));
-
- return functionCallContent;
- }
-
- private static ChatMessageContent GenerateFunctionResultsContent(string agentName, IList functionResults)
+ private static ChatMessageContent GenerateFunctionResultContent(string agentName, FunctionResultContent[] functionResults)
{
ChatMessageContent functionResultContent = new(AuthorRole.Tool, content: null)
{
@@ -674,7 +660,7 @@ private static Task[] ExecuteFunctionSteps(OpenAIAssistan
for (int index = 0; index < functionCalls.Length; ++index)
{
- functionTasks[index] = functionCalls[index].InvokeAsync(agent.Kernel, cancellationToken);
+ functionTasks[index] = ExecuteFunctionStep(agent, functionCalls[index], cancellationToken);
}
return functionTasks;
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
index 1e79fa31b95d..3ae5d03472a9 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
@@ -27,20 +27,13 @@ public sealed class OpenAIAssistantAgent : KernelAgent
public const string CodeInterpreterMetadataKey = "code";
internal const string OptionsMetadataKey = "__run_options";
+ internal const string TemplateMetadataKey = "__template_format";
private readonly OpenAIClientProvider _provider;
private readonly Assistant _assistant;
private readonly AssistantClient _client;
private readonly string[] _channelKeys;
- ///
- /// Optional arguments for the agent.
- ///
- ///
- /// This property is not currently used by the agent, but is provided for future extensibility.
- ///
- public KernelArguments? Arguments { get; init; }
-
///
/// The assistant definition.
///
@@ -65,15 +58,64 @@ public sealed class OpenAIAssistantAgent : KernelAgent
///
/// Define a new .
///
+ /// OpenAI client provider for accessing the API service.
+ /// Defines the assistant's capabilities.
/// The containing services, plugins, and other state for use throughout the operation.
+ /// Required arguments that provide default template parameters, including any .
+ /// Prompt template configuration
+ /// An optional factory to produce the for the agent
+ /// The to monitor for cancellation requests. The default is .
+ /// An instance
+ public async static Task CreateFromTemplateAsync(
+ OpenAIClientProvider clientProvider,
+ OpenAIAssistantCapabilities capabilities,
+ Kernel kernel,
+ KernelArguments defaultArguments,
+ PromptTemplateConfig templateConfig,
+ IPromptTemplateFactory? templateFactory = null,
+ CancellationToken cancellationToken = default)
+ {
+ // Validate input
+ Verify.NotNull(kernel, nameof(kernel));
+ Verify.NotNull(defaultArguments, nameof(defaultArguments));
+ Verify.NotNull(clientProvider, nameof(clientProvider));
+ Verify.NotNull(capabilities, nameof(capabilities));
+ Verify.NotNull(templateConfig, nameof(templateConfig));
+
+ // Ensure template is valid (avoid failure after posting assistant creation)
+ IPromptTemplate? template = templateFactory?.Create(templateConfig);
+
+ // Create the client
+ AssistantClient client = CreateClient(clientProvider);
+
+ // Create the assistant
+ AssistantCreationOptions assistantCreationOptions = templateConfig.CreateAssistantOptions(capabilities);
+ Assistant model = await client.CreateAssistantAsync(capabilities.ModelId, assistantCreationOptions, cancellationToken).ConfigureAwait(false);
+
+ // Instantiate the agent
+ return
+ new OpenAIAssistantAgent(model, clientProvider, client)
+ {
+ Kernel = kernel,
+ Arguments = defaultArguments,
+ Template = template,
+ };
+ }
+
+ ///
+ /// Define a new .
+ ///
/// OpenAI client provider for accessing the API service.
/// The assistant definition.
+ /// The containing services, plugins, and other state for use throughout the operation.
+ /// Optional default arguments, including any .
/// The to monitor for cancellation requests. The default is .
/// An instance
public static async Task CreateAsync(
- Kernel kernel,
OpenAIClientProvider clientProvider,
OpenAIAssistantDefinition definition,
+ Kernel kernel,
+ KernelArguments? defaultArguments = null,
CancellationToken cancellationToken = default)
{
// Validate input
@@ -85,7 +127,7 @@ public static async Task CreateAsync(
AssistantClient client = CreateClient(clientProvider);
// Create the assistant
- AssistantCreationOptions assistantCreationOptions = CreateAssistantCreationOptions(definition);
+ AssistantCreationOptions assistantCreationOptions = definition.CreateAssistantOptions();
Assistant model = await client.CreateAssistantAsync(definition.ModelId, assistantCreationOptions, cancellationToken).ConfigureAwait(false);
// Instantiate the agent
@@ -93,6 +135,7 @@ public static async Task CreateAsync(
new OpenAIAssistantAgent(model, clientProvider, client)
{
Kernel = kernel,
+ Arguments = defaultArguments
};
}
@@ -122,28 +165,45 @@ public static async IAsyncEnumerable ListDefinitionsA
///
/// Retrieve a by identifier.
///
- /// The containing services, plugins, and other state for use throughout the operation.
- /// Configuration for accessing the API service.
+ /// Configuration for accessing the API service.
/// The agent identifier
+ /// The containing services, plugins, and other state for use throughout the operation.
+ /// Optional default arguments, including any .
+ /// An optional factory to produce the for the agent
/// The to monitor for cancellation requests. The default is .
/// An instance
public static async Task RetrieveAsync(
- Kernel kernel,
- OpenAIClientProvider provider,
+ OpenAIClientProvider clientProvider,
string id,
+ Kernel kernel,
+ KernelArguments? defaultArguments = null,
+ IPromptTemplateFactory? templateFactory = null,
CancellationToken cancellationToken = default)
{
+ // Validate input
+ Verify.NotNull(kernel, nameof(kernel));
+ Verify.NotNull(clientProvider, nameof(clientProvider));
+ Verify.NotNullOrWhiteSpace(id, nameof(id));
+
// Create the client
- AssistantClient client = CreateClient(provider);
+ AssistantClient client = CreateClient(clientProvider);
// Retrieve the assistant
Assistant model = await client.GetAssistantAsync(id, cancellationToken).ConfigureAwait(false);
+ // Create the prompt template from the retrieved instructions, when a template factory is provided
+ IPromptTemplate? template =
+ !string.IsNullOrWhiteSpace(model.Instructions) ?
+ templateFactory?.Create(new PromptTemplateConfig(model.Instructions!)) :
+ null;
+
// Instantiate the agent
return
- new OpenAIAssistantAgent(model, provider, client)
+ new OpenAIAssistantAgent(model, clientProvider, client)
{
Kernel = kernel,
+ Arguments = defaultArguments,
+ Template = template,
};
}
@@ -250,7 +310,7 @@ public async Task DeleteAsync(CancellationToken cancellationToken = defaul
/// Optional arguments to pass to the agents's invocation, including any .
/// The containing services, plugins, and other state for use by the agent.
/// The to monitor for cancellation requests. The default is .
- /// Asynchronous enumeration of messages.
+ /// Asynchronous enumeration of response messages.
///
/// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
///
@@ -269,7 +329,7 @@ public IAsyncEnumerable InvokeAsync(
/// Optional arguments to pass to the agents's invocation, including any .
/// The containing services, plugins, and other state for use by the agent.
/// The to monitor for cancellation requests. The default is .
- /// Asynchronous enumeration of messages.
+ /// Asynchronous enumeration of response messages.
///
/// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
///
@@ -295,7 +355,7 @@ public async IAsyncEnumerable InvokeAsync(
}
///
- /// Invoke the assistant on the specified thread.
+ /// Invoke the assistant on the specified thread with streaming response.
///
/// The thread identifier
/// Optional arguments to pass to the agents's invocation, including any .
@@ -315,7 +375,7 @@ public IAsyncEnumerable InvokeStreamingAsync(
=> this.InvokeStreamingAsync(threadId, options: null, arguments, kernel, messages, cancellationToken);
///
- /// Invoke the assistant on the specified thread.
+ /// Invoke the assistant on the specified thread with streaming response.
///
/// The thread identifier
/// Optional invocation options
@@ -383,6 +443,9 @@ internal void ThrowIfDeleted()
}
}
+ internal Task GetInstructionsAsync(Kernel kernel, KernelArguments? arguments, CancellationToken cancellationToken) =>
+ this.FormatInstructionsAsync(kernel, arguments, cancellationToken);
+
///
/// Initializes a new instance of the class.
///
@@ -435,63 +498,8 @@ private static OpenAIAssistantDefinition CreateAssistantDefinition(Assistant mod
};
}
- private static AssistantCreationOptions CreateAssistantCreationOptions(OpenAIAssistantDefinition definition)
- {
- AssistantCreationOptions assistantCreationOptions =
- new()
- {
- Description = definition.Description,
- Instructions = definition.Instructions,
- Name = definition.Name,
- ToolResources =
- AssistantToolResourcesFactory.GenerateToolResources(
- definition.EnableFileSearch ? definition.VectorStoreId : null,
- definition.EnableCodeInterpreter ? definition.CodeInterpreterFileIds : null),
- ResponseFormat = definition.EnableJsonResponse ? AssistantResponseFormat.JsonObject : AssistantResponseFormat.Auto,
- Temperature = definition.Temperature,
- NucleusSamplingFactor = definition.TopP,
- };
-
- if (definition.Metadata != null)
- {
- foreach (KeyValuePair item in definition.Metadata)
- {
- assistantCreationOptions.Metadata[item.Key] = item.Value;
- }
- }
-
- if (definition.ExecutionOptions != null)
- {
- string optionsJson = JsonSerializer.Serialize(definition.ExecutionOptions);
- assistantCreationOptions.Metadata[OptionsMetadataKey] = optionsJson;
- }
-
- if (definition.EnableCodeInterpreter)
- {
- assistantCreationOptions.Tools.Add(ToolDefinition.CreateCodeInterpreter());
- }
-
- if (definition.EnableFileSearch)
- {
- assistantCreationOptions.Tools.Add(ToolDefinition.CreateFileSearch());
- }
-
- return assistantCreationOptions;
- }
-
private static AssistantClient CreateClient(OpenAIClientProvider config)
{
return config.Client.GetAssistantClient();
}
-
- private static IEnumerable DefineChannelKeys(OpenAIClientProvider config)
- {
- // Distinguish from other channel types.
- yield return typeof(AgentChannel).FullName!;
-
- foreach (string key in config.ConfigurationKeys)
- {
- yield return key;
- }
- }
}
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantCapabilities.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantCapabilities.cs
new file mode 100644
index 000000000000..c2247ec11e88
--- /dev/null
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantCapabilities.cs
@@ -0,0 +1,94 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using System.Text.Json.Serialization;
+
+namespace Microsoft.SemanticKernel.Agents.OpenAI;
+
+///
+/// Defines the capabilities of an assistant.
+///
+public class OpenAIAssistantCapabilities
+{
+ ///
+ /// Identifies the AI model targeted by the agent.
+ ///
+ public string ModelId { get; }
+
+ ///
+ /// The assistant's unique id. (Ignored on create.)
+ ///
+ public string Id { get; init; } = string.Empty;
+
+ ///
+ /// Optional file-ids made available to the code_interpreter tool, if enabled.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public IReadOnlyList? CodeInterpreterFileIds { get; init; }
+
+ ///
+ /// Set if code-interpreter is enabled.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
+ public bool EnableCodeInterpreter { get; init; }
+
+ ///
+ /// Set if file-search is enabled.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
+ public bool EnableFileSearch { get; init; }
+
+ ///
+ /// Set if json response-format is enabled.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
+ public bool EnableJsonResponse { get; init; }
+
+ ///
+ /// A set of up to 16 key/value pairs that can be attached to an agent, used for
+ /// storing additional information about that object in a structured format. Keys
+ /// may be up to 64 characters in length and values may be up to 512 characters in length.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public IReadOnlyDictionary? Metadata { get; init; }
+
+ ///
+ /// The sampling temperature to use, between 0 and 2.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public float? Temperature { get; init; }
+
+ ///
+ /// An alternative to sampling with temperature, called nucleus sampling, where the model
+ /// considers the results of the tokens with top_p probability mass.
+ /// So 0.1 means only the tokens comprising the top 10% probability mass are considered.
+ ///
+ ///
+ /// Recommended to set this or temperature but not both.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public float? TopP { get; init; }
+
+ ///
+ /// Requires file-search if specified.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? VectorStoreId { get; init; }
+
+ ///
+ /// Default execution options for each agent invocation.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public OpenAIAssistantExecutionOptions? ExecutionOptions { get; init; }
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ /// The targeted model
+ [JsonConstructor]
+ public OpenAIAssistantCapabilities(string modelId)
+ {
+ Verify.NotNullOrWhiteSpace(modelId);
+
+ this.ModelId = modelId;
+ }
+}
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs
index 455a41bc5c76..9e69e997e095 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs
@@ -38,6 +38,8 @@ protected override async Task ReceiveAsync(IEnumerable histo
///
protected override IAsyncEnumerable InvokeStreamingAsync(OpenAIAssistantAgent agent, IList messages, CancellationToken cancellationToken = default)
{
+ agent.ThrowIfDeleted();
+
return AssistantThreadActions.InvokeStreamingAsync(agent, this._client, this._threadId, messages, invocationOptions: null, this.Logger, agent.Kernel, agent.Arguments, cancellationToken);
}
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs
index 7b7015aa3b4a..79ad3f98f03e 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs
@@ -1,5 +1,4 @@
// Copyright (c) Microsoft. All rights reserved.
-using System.Collections.Generic;
using System.Text.Json.Serialization;
namespace Microsoft.SemanticKernel.Agents.OpenAI;
@@ -7,24 +6,14 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI;
///
/// Defines an assistant.
///
-public sealed class OpenAIAssistantDefinition
+public sealed class OpenAIAssistantDefinition : OpenAIAssistantCapabilities
{
- ///
- /// Identifies the AI model targeted by the agent.
- ///
- public string ModelId { get; }
-
///
/// The description of the assistant.
///
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Description { get; init; }
- ///
- /// The assistant's unique id. (Ignored on create.)
- ///
- public string Id { get; init; } = string.Empty;
-
///
/// The system instructions for the assistant to use.
///
@@ -38,65 +27,24 @@ public sealed class OpenAIAssistantDefinition
public string? Name { get; init; }
///
- /// Optional file-ids made available to the code_interpreter tool, if enabled.
- ///
- [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
- public IReadOnlyList? CodeInterpreterFileIds { get; init; }
-
- ///
- /// Set if code-interpreter is enabled.
+ /// Provide the captured template format for the assistant if needed for agent retrieval.
+ /// ()
///
- [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
- public bool EnableCodeInterpreter { get; init; }
-
- ///
- /// Set if file-search is enabled.
- ///
- [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
- public bool EnableFileSearch { get; init; }
-
- ///
- /// Set if json response-format is enabled.
- ///
- [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
- public bool EnableJsonResponse { get; init; }
-
- ///
- /// A set of up to 16 key/value pairs that can be attached to an agent, used for
- /// storing additional information about that object in a structured format.Keys
- /// may be up to 64 characters in length and values may be up to 512 characters in length.
- ///
- [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
- public IReadOnlyDictionary? Metadata { get; init; }
-
- ///
- /// The sampling temperature to use, between 0 and 2.
- ///
- [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
- public float? Temperature { get; init; }
+ [JsonIgnore]
+ public string? TemplateFactoryFormat
+ {
+ get
+ {
+ if (this.Metadata == null)
+ {
+ return null;
+ }
- ///
- /// An alternative to sampling with temperature, called nucleus sampling, where the model
- /// considers the results of the tokens with top_p probability mass.
- /// So 0.1 means only the tokens comprising the top 10% probability mass are considered.
- ///
- ///
- /// Recommended to set this or temperature but not both.
- ///
- [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
- public float? TopP { get; init; }
+ this.Metadata.TryGetValue(OpenAIAssistantAgent.TemplateMetadataKey, out string? templateFormat);
- ///
- /// Requires file-search if specified.
- ///
- [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
- public string? VectorStoreId { get; init; }
-
- ///
- /// Default execution options for each agent invocation.
- ///
- [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
- public OpenAIAssistantExecutionOptions? ExecutionOptions { get; init; }
+ return templateFormat;
+ }
+ }
///
/// Initializes a new instance of the class.
@@ -104,9 +52,5 @@ public sealed class OpenAIAssistantDefinition
/// The targeted model
[JsonConstructor]
public OpenAIAssistantDefinition(string modelId)
- {
- Verify.NotNullOrWhiteSpace(modelId);
-
- this.ModelId = modelId;
- }
+ : base(modelId) { }
}
diff --git a/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs b/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs
index 0b60b66fa84a..abe8fc149d88 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs
@@ -91,7 +91,7 @@ public static OpenAIClientProvider ForOpenAI(Uri? endpoint = null, HttpClient? h
public static OpenAIClientProvider ForOpenAI(ApiKeyCredential apiKey, Uri? endpoint = null, HttpClient? httpClient = null)
{
OpenAIClientOptions clientOptions = CreateOpenAIClientOptions(endpoint, httpClient);
- return new(new OpenAIClient(apiKey ?? SingleSpaceKey, clientOptions), CreateConfigurationKeys(endpoint, httpClient));
+ return new(new OpenAIClient(apiKey, clientOptions), CreateConfigurationKeys(endpoint, httpClient));
}
///
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs
index e3aa50473e81..39d3eb58d11a 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs
@@ -31,10 +31,11 @@ public void AssistantRunOptionsFactoryExecutionOptionsNullTest()
};
// Act
- RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null);
+ RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null, null);
// Assert
Assert.NotNull(options);
+ Assert.Null(options.InstructionsOverride);
Assert.Null(options.Temperature);
Assert.Null(options.NucleusSamplingFactor);
Assert.Equal("test", options.AdditionalInstructions);
@@ -61,10 +62,11 @@ public void AssistantRunOptionsFactoryExecutionOptionsEquivalentTest()
};
// Act
- RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, invocationOptions);
+ RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, "test", invocationOptions);
// Assert
Assert.NotNull(options);
+ Assert.Equal("test", options.InstructionsOverride);
Assert.Null(options.Temperature);
Assert.Null(options.NucleusSamplingFactor);
}
@@ -98,7 +100,7 @@ public void AssistantRunOptionsFactoryExecutionOptionsOverrideTest()
};
// Act
- RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, invocationOptions);
+ RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null, invocationOptions);
// Assert
Assert.NotNull(options);
@@ -138,7 +140,7 @@ public void AssistantRunOptionsFactoryExecutionOptionsMetadataTest()
};
// Act
- RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, invocationOptions);
+ RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null, invocationOptions);
// Assert
Assert.Equal(2, options.Metadata.Count);
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs
index ef67c48f1473..364d81ee7236 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs
@@ -1,11 +1,11 @@
// Copyright (c) Microsoft. All rights reserved.
using System;
using System.Collections.Generic;
+using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Text;
-using System.Text.Json;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
@@ -59,6 +59,30 @@ public async Task VerifyOpenAIAssistantAgentCreationPropertiesAsync()
await this.VerifyAgentCreationAsync(definition);
}
+ ///
+ /// Verify the invocation and response of
+ /// for an agent with name, instructions, and description from a template.
+ ///
+ [Fact]
+ public async Task VerifyOpenAIAssistantAgentCreationDefaultTemplateAsync()
+ {
+ // Arrange
+ PromptTemplateConfig templateConfig =
+ new("test instructions")
+ {
+ Name = "testname",
+ Description = "testdescription",
+ };
+
+ OpenAIAssistantCapabilities capabilities = new("testmodel");
+
+ // Act and Assert
+ await this.VerifyAgentTemplateAsync(capabilities, templateConfig);
+
+ // Act and Assert
+ await this.VerifyAgentTemplateAsync(capabilities, templateConfig, new KernelPromptTemplateFactory());
+ }
+
///
/// Verify the invocation and response of
/// for an agent with code-interpreter enabled.
@@ -282,13 +306,36 @@ public async Task VerifyOpenAIAssistantAgentRetrievalAsync()
// Arrange
OpenAIAssistantDefinition definition = new("testmodel");
- this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentPayload(definition));
+ this.SetupResponse(HttpStatusCode.OK, definition);
+
+ OpenAIAssistantAgent agent =
+ await OpenAIAssistantAgent.RetrieveAsync(
+ this.CreateTestConfiguration(),
+ "#id",
+ this._emptyKernel);
+
+ // Act and Assert
+ ValidateAgentDefinition(agent, definition);
+ }
+
+ ///
+ /// Verify the invocation and response of .
+ ///
+ [Fact]
+ public async Task VerifyOpenAIAssistantAgentRetrievalWithFactoryAsync()
+ {
+ // Arrange
+ OpenAIAssistantDefinition definition = new("testmodel");
+
+ this.SetupResponse(HttpStatusCode.OK, definition);
OpenAIAssistantAgent agent =
await OpenAIAssistantAgent.RetrieveAsync(
- this._emptyKernel,
this.CreateTestConfiguration(),
- "#id");
+ "#id",
+ this._emptyKernel,
+ new KernelArguments(),
+ new KernelPromptTemplateFactory());
// Act and Assert
ValidateAgentDefinition(agent, definition);
@@ -306,7 +353,7 @@ public async Task VerifyOpenAIAssistantAgentDeleteAsync()
Assert.False(agent.IsDeleted);
// Arrange
- this.SetupResponse(HttpStatusCode.OK, ResponseContent.DeleteAgent);
+ this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.DeleteAgent);
// Act
await agent.DeleteAsync();
@@ -318,11 +365,14 @@ public async Task VerifyOpenAIAssistantAgentDeleteAsync()
// Assert
Assert.True(agent.IsDeleted);
await Assert.ThrowsAsync(() => agent.AddChatMessageAsync("threadid", new(AuthorRole.User, "test")));
+ await Assert.ThrowsAsync(() => agent.GetThreadMessagesAsync("threadid").ToArrayAsync().AsTask());
await Assert.ThrowsAsync(() => agent.InvokeAsync("threadid").ToArrayAsync().AsTask());
+ await Assert.ThrowsAsync(() => agent.InvokeStreamingAsync("threadid").ToArrayAsync().AsTask());
+ await Assert.ThrowsAsync(() => agent.InvokeStreamingAsync("threadid", new OpenAIAssistantInvocationOptions()).ToArrayAsync().AsTask());
}
///
- /// Verify the deletion of agent via .
+ /// Verify creating a thread via .
///
[Fact]
public async Task VerifyOpenAIAssistantAgentCreateThreadAsync()
@@ -330,7 +380,7 @@ public async Task VerifyOpenAIAssistantAgentCreateThreadAsync()
// Arrange
OpenAIAssistantAgent agent = await this.CreateAgentAsync();
- this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateThread);
+ this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.CreateThread);
// Act
string threadId = await agent.CreateThreadAsync();
@@ -338,30 +388,65 @@ public async Task VerifyOpenAIAssistantAgentCreateThreadAsync()
Assert.NotNull(threadId);
// Arrange
- this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateThread);
-
+ this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.CreateThread);
// Act
- threadId = await agent.CreateThreadAsync(new());
+ threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions());
// Assert
Assert.NotNull(threadId);
}
///
- /// Verify complex chat interaction across multiple states.
+ /// Verify deleting a thread via .
+ ///
+ [Fact]
+ public async Task VerifyOpenAIAssistantAgentDeleteThreadAsync()
+ {
+ // Arrange
+ OpenAIAssistantAgent agent = await this.CreateAgentAsync();
+
+ this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.DeleteThread);
+
+ // Act
+ bool isDeleted = await agent.DeleteThreadAsync("threadid");
+ // Assert
+ Assert.True(isDeleted);
+ }
+
+ ///
+ /// Verify uploading a file via .
///
[Fact]
- public async Task VerifyOpenAIAssistantAgentChatTextMessageAsync()
+ public async Task VerifyOpenAIAssistantAgentUploadFileAsync()
+ {
+ // Arrange
+ OpenAIAssistantAgent agent = await this.CreateAgentAsync();
+
+ this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.UploadFile);
+
+ // Act
+ using MemoryStream stream = new(Encoding.UTF8.GetBytes("test"));
+ string fileId = await agent.UploadFileAsync(stream, "text.txt");
+
+ // Assert
+ Assert.NotNull(fileId);
+ }
+
+ ///
+ /// Verify invocation via .
+ ///
+ [Fact]
+ public async Task VerifyOpenAIAssistantAgentGroupChatAsync()
{
// Arrange
OpenAIAssistantAgent agent = await this.CreateAgentAsync();
this.SetupResponses(
HttpStatusCode.OK,
- ResponseContent.CreateThread,
- ResponseContent.CreateRun,
- ResponseContent.CompletedRun,
- ResponseContent.MessageSteps,
- ResponseContent.GetTextMessage);
+ OpenAIAssistantResponseContent.CreateThread,
+ OpenAIAssistantResponseContent.Run.CreateRun,
+ OpenAIAssistantResponseContent.Run.CompletedRun,
+ OpenAIAssistantResponseContent.Run.MessageSteps,
+ OpenAIAssistantResponseContent.GetTextMessage());
AgentGroupChat chat = new();
@@ -372,6 +457,41 @@ public async Task VerifyOpenAIAssistantAgentChatTextMessageAsync()
Assert.Single(messages);
Assert.Single(messages[0].Items);
Assert.IsType(messages[0].Items[0]);
+
+ // Arrange
+ this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.DeleteThread);
+
+ // Act
+ await chat.ResetAsync();
+
+ // Assert
+ Assert.Empty(this._messageHandlerStub.ResponseQueue);
+ }
+
+ ///
+ /// Verify direct invocation of .
+ ///
+ [Fact]
+ public async Task VerifyOpenAIAssistantAgentInvokeAsync()
+ {
+ // Arrange
+ OpenAIAssistantAgent agent = await this.CreateAgentAsync();
+
+ this.SetupResponses(
+ HttpStatusCode.OK,
+ OpenAIAssistantResponseContent.CreateThread,
+ OpenAIAssistantResponseContent.Run.CreateRun,
+ OpenAIAssistantResponseContent.Run.CompletedRun,
+ OpenAIAssistantResponseContent.Run.MessageSteps,
+ OpenAIAssistantResponseContent.GetTextMessage());
+
+ // Act
+ ChatMessageContent[] messages = await agent.InvokeAsync("threadid").ToArrayAsync();
+
+ // Assert
+ Assert.Single(messages);
+ Assert.Single(messages[0].Items);
+ Assert.IsType(messages[0].Items[0]);
}
///
@@ -385,11 +505,11 @@ public async Task VerifyOpenAIAssistantAgentChatTextMessageWithAnnotationAsync()
this.SetupResponses(
HttpStatusCode.OK,
- ResponseContent.CreateThread,
- ResponseContent.CreateRun,
- ResponseContent.CompletedRun,
- ResponseContent.MessageSteps,
- ResponseContent.GetTextMessageWithAnnotation);
+ OpenAIAssistantResponseContent.CreateThread,
+ OpenAIAssistantResponseContent.Run.CreateRun,
+ OpenAIAssistantResponseContent.Run.CompletedRun,
+ OpenAIAssistantResponseContent.Run.MessageSteps,
+ OpenAIAssistantResponseContent.GetTextMessageWithAnnotation);
AgentGroupChat chat = new();
@@ -414,11 +534,11 @@ public async Task VerifyOpenAIAssistantAgentChatImageMessageAsync()
this.SetupResponses(
HttpStatusCode.OK,
- ResponseContent.CreateThread,
- ResponseContent.CreateRun,
- ResponseContent.CompletedRun,
- ResponseContent.MessageSteps,
- ResponseContent.GetImageMessage);
+ OpenAIAssistantResponseContent.CreateThread,
+ OpenAIAssistantResponseContent.Run.CreateRun,
+ OpenAIAssistantResponseContent.Run.CompletedRun,
+ OpenAIAssistantResponseContent.Run.MessageSteps,
+ OpenAIAssistantResponseContent.GetImageMessage);
AgentGroupChat chat = new();
@@ -443,11 +563,11 @@ public async Task VerifyOpenAIAssistantAgentGetMessagesAsync()
// Initialize agent channel
this.SetupResponses(
HttpStatusCode.OK,
- ResponseContent.CreateThread,
- ResponseContent.CreateRun,
- ResponseContent.CompletedRun,
- ResponseContent.MessageSteps,
- ResponseContent.GetTextMessage);
+ OpenAIAssistantResponseContent.CreateThread,
+ OpenAIAssistantResponseContent.Run.CreateRun,
+ OpenAIAssistantResponseContent.Run.CompletedRun,
+ OpenAIAssistantResponseContent.Run.MessageSteps,
+ OpenAIAssistantResponseContent.GetTextMessage());
AgentGroupChat chat = new();
@@ -459,9 +579,9 @@ public async Task VerifyOpenAIAssistantAgentGetMessagesAsync()
// Arrange: Setup messages
this.SetupResponses(
HttpStatusCode.OK,
- ResponseContent.ListMessagesPageMore,
- ResponseContent.ListMessagesPageMore,
- ResponseContent.ListMessagesPageFinal);
+ OpenAIAssistantResponseContent.ListMessagesPageMore,
+ OpenAIAssistantResponseContent.ListMessagesPageMore,
+ OpenAIAssistantResponseContent.ListMessagesPageFinal);
// Act: Get messages
messages = await chat.GetChatMessagesAsync(agent).ToArrayAsync();
@@ -469,6 +589,47 @@ public async Task VerifyOpenAIAssistantAgentGetMessagesAsync()
Assert.Equal(5, messages.Length);
}
+ ///
+ /// Verify adding a message to a thread via .
+ ///
+ [Fact]
+ public async Task VerifyOpenAIAssistantAgentAddThreadMessagesAsync()
+ {
+ // Arrange: Create agent
+ OpenAIAssistantAgent agent = await this.CreateAgentAsync();
+ // Arrange: Setup messages
+ this.SetupResponses(
+ HttpStatusCode.OK,
+ OpenAIAssistantResponseContent.GetTextMessage());
+
+ // Act (no exception)
+ await agent.AddChatMessageAsync(agent.Id, new ChatMessageContent(AuthorRole.User, "hi"));
+ Assert.Empty(this._messageHandlerStub.ResponseQueue);
+ }
+
+ ///
+ /// Verify message retrieval via .
+ ///
+ [Fact]
+ public async Task VerifyOpenAIAssistantAgentGetThreadMessagesAsync()
+ {
+ // Arrange: Create agent
+ OpenAIAssistantAgent agent = await this.CreateAgentAsync();
+
+ // Arrange: Setup messages
+ this.SetupResponses(
+ HttpStatusCode.OK,
+ OpenAIAssistantResponseContent.ListMessagesPageMore,
+ OpenAIAssistantResponseContent.ListMessagesPageMore,
+ OpenAIAssistantResponseContent.ListMessagesPageFinal);
+
+ // Act: Get messages
+ ChatMessageContent[] messages = await agent.GetThreadMessagesAsync("threadid").ToArrayAsync();
+
+ // Assert
+ Assert.Equal(5, messages.Length);
+ }
+
///
/// Verify complex chat interaction across multiple states.
///
@@ -481,11 +642,11 @@ public async Task VerifyOpenAIAssistantAgentAddMessagesAsync()
// Initialize agent channel
this.SetupResponses(
HttpStatusCode.OK,
- ResponseContent.CreateThread,
- ResponseContent.CreateRun,
- ResponseContent.CompletedRun,
- ResponseContent.MessageSteps,
- ResponseContent.GetTextMessage);
+ OpenAIAssistantResponseContent.CreateThread,
+ OpenAIAssistantResponseContent.Run.CreateRun,
+ OpenAIAssistantResponseContent.Run.CompletedRun,
+ OpenAIAssistantResponseContent.Run.MessageSteps,
+ OpenAIAssistantResponseContent.GetTextMessage());
AgentGroupChat chat = new();
// Act
@@ -513,9 +674,9 @@ public async Task VerifyOpenAIAssistantAgentListDefinitionAsync()
this.SetupResponses(
HttpStatusCode.OK,
- ResponseContent.ListAgentsPageMore,
- ResponseContent.ListAgentsPageMore,
- ResponseContent.ListAgentsPageFinal);
+ OpenAIAssistantResponseContent.ListAgentsPageMore,
+ OpenAIAssistantResponseContent.ListAgentsPageMore,
+ OpenAIAssistantResponseContent.ListAgentsPageFinal);
// Act
var messages =
@@ -527,8 +688,8 @@ await OpenAIAssistantAgent.ListDefinitionsAsync(
// Arrange
this.SetupResponses(
HttpStatusCode.OK,
- ResponseContent.ListAgentsPageMore,
- ResponseContent.ListAgentsPageFinal);
+ OpenAIAssistantResponseContent.ListAgentsPageMore,
+ OpenAIAssistantResponseContent.ListAgentsPageFinal);
// Act
messages =
@@ -552,14 +713,14 @@ public async Task VerifyOpenAIAssistantAgentWithFunctionCallAsync()
this.SetupResponses(
HttpStatusCode.OK,
- ResponseContent.CreateThread,
- ResponseContent.CreateRun,
- ResponseContent.PendingRun,
- ResponseContent.ToolSteps,
- ResponseContent.ToolResponse,
- ResponseContent.CompletedRun,
- ResponseContent.MessageSteps,
- ResponseContent.GetTextMessage);
+ OpenAIAssistantResponseContent.CreateThread,
+ OpenAIAssistantResponseContent.Run.CreateRun,
+ OpenAIAssistantResponseContent.Run.PendingRun,
+ OpenAIAssistantResponseContent.Run.ToolSteps,
+ OpenAIAssistantResponseContent.ToolResponse,
+ OpenAIAssistantResponseContent.Run.CompletedRun,
+ OpenAIAssistantResponseContent.Run.MessageSteps,
+ OpenAIAssistantResponseContent.GetTextMessage());
AgentGroupChat chat = new();
@@ -591,44 +752,78 @@ public OpenAIAssistantAgentTests()
private async Task VerifyAgentCreationAsync(OpenAIAssistantDefinition definition)
{
- this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentPayload(definition));
+ this.SetupResponse(HttpStatusCode.OK, definition);
OpenAIAssistantAgent agent =
await OpenAIAssistantAgent.CreateAsync(
- this._emptyKernel,
this.CreateTestConfiguration(),
- definition);
+ definition,
+ this._emptyKernel);
ValidateAgentDefinition(agent, definition);
}
- private static void ValidateAgentDefinition(OpenAIAssistantAgent agent, OpenAIAssistantDefinition sourceDefinition)
+ private async Task VerifyAgentTemplateAsync(
+ OpenAIAssistantCapabilities capabilities,
+ PromptTemplateConfig templateConfig,
+ IPromptTemplateFactory? templateFactory = null)
+ {
+ this.SetupResponse(HttpStatusCode.OK, capabilities, templateConfig);
+
+ OpenAIAssistantAgent agent =
+ await OpenAIAssistantAgent.CreateFromTemplateAsync(
+ this.CreateTestConfiguration(),
+ capabilities,
+ this._emptyKernel,
+ new KernelArguments(),
+ templateConfig,
+ templateFactory);
+
+ ValidateAgentDefinition(agent, capabilities, templateConfig);
+ }
+
+ private static void ValidateAgentDefinition(OpenAIAssistantAgent agent, OpenAIAssistantDefinition expectedConfig)
+ {
+ ValidateAgent(agent, expectedConfig.Name, expectedConfig.Instructions, expectedConfig.Description, expectedConfig);
+ }
+
+ private static void ValidateAgentDefinition(OpenAIAssistantAgent agent, OpenAIAssistantCapabilities expectedConfig, PromptTemplateConfig templateConfig)
+ {
+ ValidateAgent(agent, templateConfig.Name, templateConfig.Template, templateConfig.Description, expectedConfig);
+ }
+
+ private static void ValidateAgent(
+ OpenAIAssistantAgent agent,
+ string? expectedName,
+ string? expectedInstructions,
+ string? expectedDescription,
+ OpenAIAssistantCapabilities expectedConfig)
{
// Verify fundamental state
Assert.NotNull(agent);
Assert.NotNull(agent.Id);
Assert.False(agent.IsDeleted);
Assert.NotNull(agent.Definition);
- Assert.Equal(sourceDefinition.ModelId, agent.Definition.ModelId);
+ Assert.Equal(expectedConfig.ModelId, agent.Definition.ModelId);
// Verify core properties
- Assert.Equal(sourceDefinition.Instructions ?? string.Empty, agent.Instructions);
- Assert.Equal(sourceDefinition.Name ?? string.Empty, agent.Name);
- Assert.Equal(sourceDefinition.Description ?? string.Empty, agent.Description);
+ Assert.Equal(expectedInstructions ?? string.Empty, agent.Instructions);
+ Assert.Equal(expectedName ?? string.Empty, agent.Name);
+ Assert.Equal(expectedDescription ?? string.Empty, agent.Description);
// Verify options
- Assert.Equal(sourceDefinition.Temperature, agent.Definition.Temperature);
- Assert.Equal(sourceDefinition.TopP, agent.Definition.TopP);
- Assert.Equal(sourceDefinition.ExecutionOptions?.MaxCompletionTokens, agent.Definition.ExecutionOptions?.MaxCompletionTokens);
- Assert.Equal(sourceDefinition.ExecutionOptions?.MaxPromptTokens, agent.Definition.ExecutionOptions?.MaxPromptTokens);
- Assert.Equal(sourceDefinition.ExecutionOptions?.ParallelToolCallsEnabled, agent.Definition.ExecutionOptions?.ParallelToolCallsEnabled);
- Assert.Equal(sourceDefinition.ExecutionOptions?.TruncationMessageCount, agent.Definition.ExecutionOptions?.TruncationMessageCount);
+ Assert.Equal(expectedConfig.Temperature, agent.Definition.Temperature);
+ Assert.Equal(expectedConfig.TopP, agent.Definition.TopP);
+ Assert.Equal(expectedConfig.ExecutionOptions?.MaxCompletionTokens, agent.Definition.ExecutionOptions?.MaxCompletionTokens);
+ Assert.Equal(expectedConfig.ExecutionOptions?.MaxPromptTokens, agent.Definition.ExecutionOptions?.MaxPromptTokens);
+ Assert.Equal(expectedConfig.ExecutionOptions?.ParallelToolCallsEnabled, agent.Definition.ExecutionOptions?.ParallelToolCallsEnabled);
+ Assert.Equal(expectedConfig.ExecutionOptions?.TruncationMessageCount, agent.Definition.ExecutionOptions?.TruncationMessageCount);
// Verify tool definitions
int expectedToolCount = 0;
bool hasCodeInterpreter = false;
- if (sourceDefinition.EnableCodeInterpreter)
+ if (expectedConfig.EnableCodeInterpreter)
{
hasCodeInterpreter = true;
++expectedToolCount;
@@ -637,7 +832,7 @@ private static void ValidateAgentDefinition(OpenAIAssistantAgent agent, OpenAIAs
Assert.Equal(hasCodeInterpreter, agent.Tools.OfType().Any());
bool hasFileSearch = false;
- if (sourceDefinition.EnableFileSearch)
+ if (expectedConfig.EnableFileSearch)
{
hasFileSearch = true;
++expectedToolCount;
@@ -649,17 +844,17 @@ private static void ValidateAgentDefinition(OpenAIAssistantAgent agent, OpenAIAs
// Verify metadata
Assert.NotNull(agent.Definition.Metadata);
- if (sourceDefinition.ExecutionOptions == null)
+ if (expectedConfig.ExecutionOptions == null)
{
- Assert.Equal(sourceDefinition.Metadata ?? new Dictionary(), agent.Definition.Metadata);
+ Assert.Equal(expectedConfig.Metadata ?? new Dictionary(), agent.Definition.Metadata);
}
else // Additional metadata present when execution options are defined
{
- Assert.Equal((sourceDefinition.Metadata?.Count ?? 0) + 1, agent.Definition.Metadata.Count);
+ Assert.Equal((expectedConfig.Metadata?.Count ?? 0) + 1, agent.Definition.Metadata.Count);
- if (sourceDefinition.Metadata != null)
+ if (expectedConfig.Metadata != null)
{
- foreach (var (key, value) in sourceDefinition.Metadata)
+ foreach (var (key, value) in expectedConfig.Metadata)
{
string? targetValue = agent.Definition.Metadata[key];
Assert.NotNull(targetValue);
@@ -669,21 +864,21 @@ private static void ValidateAgentDefinition(OpenAIAssistantAgent agent, OpenAIAs
}
// Verify detail definition
- Assert.Equal(sourceDefinition.VectorStoreId, agent.Definition.VectorStoreId);
- Assert.Equal(sourceDefinition.CodeInterpreterFileIds, agent.Definition.CodeInterpreterFileIds);
+ Assert.Equal(expectedConfig.VectorStoreId, agent.Definition.VectorStoreId);
+ Assert.Equal(expectedConfig.CodeInterpreterFileIds, agent.Definition.CodeInterpreterFileIds);
}
private Task CreateAgentAsync()
{
OpenAIAssistantDefinition definition = new("testmodel");
- this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentPayload(definition));
+ this.SetupResponse(HttpStatusCode.OK, definition);
return
OpenAIAssistantAgent.CreateAsync(
- this._emptyKernel,
this.CreateTestConfiguration(),
- definition);
+ definition,
+ this._emptyKernel);
}
private OpenAIClientProvider CreateTestConfiguration(bool targetAzure = false)
@@ -691,28 +886,17 @@ private OpenAIClientProvider CreateTestConfiguration(bool targetAzure = false)
OpenAIClientProvider.ForAzureOpenAI(apiKey: "fakekey", endpoint: new Uri("https://localhost"), this._httpClient) :
OpenAIClientProvider.ForOpenAI(apiKey: "fakekey", endpoint: null, this._httpClient);
- private void SetupResponse(HttpStatusCode statusCode, string content)
- {
- this._messageHandlerStub.ResponseToReturn =
- new(statusCode)
- {
- Content = new StringContent(content)
- };
- }
+ private void SetupResponse(HttpStatusCode statusCode, string content) =>
+ this._messageHandlerStub.SetupResponses(statusCode, content);
- private void SetupResponses(HttpStatusCode statusCode, params string[] content)
- {
- foreach (var item in content)
- {
-#pragma warning disable CA2000 // Dispose objects before losing scope
- this._messageHandlerStub.ResponseQueue.Enqueue(
- new(statusCode)
- {
- Content = new StringContent(item)
- });
-#pragma warning restore CA2000 // Dispose objects before losing scope
- }
- }
+ private void SetupResponse(HttpStatusCode statusCode, OpenAIAssistantDefinition definition) =>
+ this._messageHandlerStub.SetupResponses(statusCode, OpenAIAssistantResponseContent.AssistantDefinition(definition));
+
+ private void SetupResponse(HttpStatusCode statusCode, OpenAIAssistantCapabilities capabilities, PromptTemplateConfig templateConfig) =>
+ this._messageHandlerStub.SetupResponses(statusCode, OpenAIAssistantResponseContent.AssistantDefinition(capabilities, templateConfig));
+
+ private void SetupResponses(HttpStatusCode statusCode, params string[] content) =>
+ this._messageHandlerStub.SetupResponses(statusCode, content);
private sealed class MyPlugin
{
@@ -720,528 +904,4 @@ private sealed class MyPlugin
public void MyFunction(int index)
{ }
}
-
- private static class ResponseContent
- {
- public static string CreateAgentPayload(OpenAIAssistantDefinition definition)
- {
- StringBuilder builder = new();
- builder.AppendLine("{");
- builder.AppendLine(@" ""id"": ""asst_abc123"",");
- builder.AppendLine(@" ""object"": ""assistant"",");
- builder.AppendLine(@" ""created_at"": 1698984975,");
- builder.AppendLine(@$" ""name"": ""{definition.Name}"",");
- builder.AppendLine(@$" ""description"": ""{definition.Description}"",");
- builder.AppendLine(@$" ""instructions"": ""{definition.Instructions}"",");
- builder.AppendLine(@$" ""model"": ""{definition.ModelId}"",");
-
- bool hasCodeInterpreter = definition.EnableCodeInterpreter;
- bool hasCodeInterpreterFiles = (definition.CodeInterpreterFileIds?.Count ?? 0) > 0;
- bool hasFileSearch = definition.EnableFileSearch;
- if (!hasCodeInterpreter && !hasFileSearch)
- {
- builder.AppendLine(@" ""tools"": [],");
- }
- else
- {
- builder.AppendLine(@" ""tools"": [");
-
- if (hasCodeInterpreter)
- {
- builder.Append(@$" {{ ""type"": ""code_interpreter"" }}{(hasFileSearch ? "," : string.Empty)}");
- }
-
- if (hasFileSearch)
- {
- builder.AppendLine(@" { ""type"": ""file_search"" }");
- }
-
- builder.AppendLine(" ],");
- }
-
- if (!hasCodeInterpreterFiles && !hasFileSearch)
- {
- builder.AppendLine(@" ""tool_resources"": {},");
- }
- else
- {
- builder.AppendLine(@" ""tool_resources"": {");
-
- if (hasCodeInterpreterFiles)
- {
- string fileIds = string.Join(",", definition.CodeInterpreterFileIds!.Select(fileId => "\"" + fileId + "\""));
- builder.AppendLine(@$" ""code_interpreter"": {{ ""file_ids"": [{fileIds}] }}{(hasFileSearch ? "," : string.Empty)}");
- }
-
- if (hasFileSearch)
- {
- builder.AppendLine(@$" ""file_search"": {{ ""vector_store_ids"": [""{definition.VectorStoreId}""] }}");
- }
-
- builder.AppendLine(" },");
- }
-
- if (definition.Temperature.HasValue)
- {
- builder.AppendLine(@$" ""temperature"": {definition.Temperature},");
- }
-
- if (definition.TopP.HasValue)
- {
- builder.AppendLine(@$" ""top_p"": {definition.TopP},");
- }
-
- bool hasExecutionOptions = definition.ExecutionOptions != null;
- int metadataCount = (definition.Metadata?.Count ?? 0);
- if (metadataCount == 0 && !hasExecutionOptions)
- {
- builder.AppendLine(@" ""metadata"": {}");
- }
- else
- {
- int index = 0;
- builder.AppendLine(@" ""metadata"": {");
-
- if (hasExecutionOptions)
- {
- string serializedExecutionOptions = JsonSerializer.Serialize(definition.ExecutionOptions);
- builder.AppendLine(@$" ""{OpenAIAssistantAgent.OptionsMetadataKey}"": ""{JsonEncodedText.Encode(serializedExecutionOptions)}""{(metadataCount > 0 ? "," : string.Empty)}");
- }
-
- if (metadataCount > 0)
- {
- foreach (var (key, value) in definition.Metadata!)
- {
- builder.AppendLine(@$" ""{key}"": ""{value}""{(index < metadataCount - 1 ? "," : string.Empty)}");
- ++index;
- }
- }
-
- builder.AppendLine(" }");
- }
-
- builder.AppendLine("}");
-
- return builder.ToString();
- }
-
- public const string CreateAgentWithEverything =
- """
- {
- "tool_resources": {
- "file_search": { "vector_store_ids": ["#vs"] }
- },
- }
- """;
-
- public const string DeleteAgent =
- """
- {
- "id": "asst_abc123",
- "object": "assistant.deleted",
- "deleted": true
- }
- """;
-
- public const string CreateThread =
- """
- {
- "id": "thread_abc123",
- "object": "thread",
- "created_at": 1699012949,
- "metadata": {}
- }
- """;
-
- public const string CreateRun =
- """
- {
- "id": "run_abc123",
- "object": "thread.run",
- "created_at": 1699063290,
- "assistant_id": "asst_abc123",
- "thread_id": "thread_abc123",
- "status": "queued",
- "started_at": 1699063290,
- "expires_at": null,
- "cancelled_at": null,
- "failed_at": null,
- "completed_at": 1699063291,
- "last_error": null,
- "model": "gpt-4-turbo",
- "instructions": null,
- "tools": [],
- "file_ids": [],
- "metadata": {},
- "usage": null,
- "temperature": 1
- }
- """;
-
- public const string PendingRun =
- """
- {
- "id": "run_abc123",
- "object": "thread.run",
- "created_at": 1699063290,
- "assistant_id": "asst_abc123",
- "thread_id": "thread_abc123",
- "status": "requires_action",
- "started_at": 1699063290,
- "expires_at": null,
- "cancelled_at": null,
- "failed_at": null,
- "completed_at": 1699063291,
- "last_error": null,
- "model": "gpt-4-turbo",
- "instructions": null,
- "tools": [],
- "file_ids": [],
- "metadata": {},
- "usage": null,
- "temperature": 1
- }
- """;
-
- public const string CompletedRun =
- """
- {
- "id": "run_abc123",
- "object": "thread.run",
- "created_at": 1699063290,
- "assistant_id": "asst_abc123",
- "thread_id": "thread_abc123",
- "status": "completed",
- "started_at": 1699063290,
- "expires_at": null,
- "cancelled_at": null,
- "failed_at": null,
- "completed_at": 1699063291,
- "last_error": null,
- "model": "gpt-4-turbo",
- "instructions": null,
- "tools": [],
- "file_ids": [],
- "metadata": {},
- "usage": null,
- "temperature": 1
- }
- """;
-
- public const string MessageSteps =
- """
- {
- "object": "list",
- "data": [
- {
- "id": "step_abc123",
- "object": "thread.run.step",
- "created_at": 1699063291,
- "run_id": "run_abc123",
- "assistant_id": "asst_abc123",
- "thread_id": "thread_abc123",
- "type": "message_creation",
- "status": "completed",
- "cancelled_at": null,
- "completed_at": 1699063291,
- "expired_at": null,
- "failed_at": null,
- "last_error": null,
- "step_details": {
- "type": "message_creation",
- "message_creation": {
- "message_id": "msg_abc123"
- }
- },
- "usage": {
- "prompt_tokens": 123,
- "completion_tokens": 456,
- "total_tokens": 579
- }
- }
- ],
- "first_id": "step_abc123",
- "last_id": "step_abc456",
- "has_more": false
- }
- """;
-
- public const string ToolSteps =
- """
- {
- "object": "list",
- "data": [
- {
- "id": "step_abc987",
- "object": "thread.run.step",
- "created_at": 1699063291,
- "run_id": "run_abc123",
- "assistant_id": "asst_abc123",
- "thread_id": "thread_abc123",
- "type": "tool_calls",
- "status": "in_progress",
- "cancelled_at": null,
- "completed_at": 1699063291,
- "expired_at": null,
- "failed_at": null,
- "last_error": null,
- "step_details": {
- "type": "tool_calls",
- "tool_calls": [
- {
- "id": "tool_1",
- "type": "function",
- "function": {
- "name": "MyPlugin-MyFunction",
- "arguments": "{ \"index\": 3 }",
- "output": "test"
- }
- }
- ]
- },
- "usage": {
- "prompt_tokens": 123,
- "completion_tokens": 456,
- "total_tokens": 579
- }
- }
- ],
- "first_id": "step_abc123",
- "last_id": "step_abc456",
- "has_more": false
- }
- """;
-
- public const string ToolResponse = "{ }";
-
- public const string GetImageMessage =
- """
- {
- "id": "msg_abc123",
- "object": "thread.message",
- "created_at": 1699017614,
- "thread_id": "thread_abc123",
- "role": "user",
- "content": [
- {
- "type": "image_file",
- "image_file": {
- "file_id": "file_123"
- }
- }
- ],
- "assistant_id": "asst_abc123",
- "run_id": "run_abc123"
- }
- """;
-
- public const string GetTextMessage =
- """
- {
- "id": "msg_abc123",
- "object": "thread.message",
- "created_at": 1699017614,
- "thread_id": "thread_abc123",
- "role": "user",
- "content": [
- {
- "type": "text",
- "text": {
- "value": "How does AI work? Explain it in simple terms.",
- "annotations": []
- }
- }
- ],
- "assistant_id": "asst_abc123",
- "run_id": "run_abc123"
- }
- """;
-
- public const string GetTextMessageWithAnnotation =
- """
- {
- "id": "msg_abc123",
- "object": "thread.message",
- "created_at": 1699017614,
- "thread_id": "thread_abc123",
- "role": "user",
- "content": [
- {
- "type": "text",
- "text": {
- "value": "How does AI work? Explain it in simple terms.**f1",
- "annotations": [
- {
- "type": "file_citation",
- "text": "**f1",
- "file_citation": {
- "file_id": "file_123",
- "quote": "does"
- },
- "start_index": 3,
- "end_index": 6
- }
- ]
- }
- }
- ],
- "assistant_id": "asst_abc123",
- "run_id": "run_abc123"
- }
- """;
-
- public const string ListAgentsPageMore =
- """
- {
- "object": "list",
- "data": [
- {
- "id": "asst_abc123",
- "object": "assistant",
- "created_at": 1698982736,
- "name": "Coding Tutor",
- "description": null,
- "model": "gpt-4-turbo",
- "instructions": "You are a helpful assistant designed to make me better at coding!",
- "tools": [],
- "metadata": {}
- },
- {
- "id": "asst_abc456",
- "object": "assistant",
- "created_at": 1698982718,
- "name": "My Assistant",
- "description": null,
- "model": "gpt-4-turbo",
- "instructions": "You are a helpful assistant designed to make me better at coding!",
- "tools": [],
- "metadata": {}
- },
- {
- "id": "asst_abc789",
- "object": "assistant",
- "created_at": 1698982643,
- "name": null,
- "description": null,
- "model": "gpt-4-turbo",
- "instructions": null,
- "tools": [],
- "metadata": {}
- }
- ],
- "first_id": "asst_abc123",
- "last_id": "asst_abc789",
- "has_more": true
- }
- """;
-
- public const string ListAgentsPageFinal =
- """
- {
- "object": "list",
- "data": [
- {
- "id": "asst_abc789",
- "object": "assistant",
- "created_at": 1698982736,
- "name": "Coding Tutor",
- "description": null,
- "model": "gpt-4-turbo",
- "instructions": "You are a helpful assistant designed to make me better at coding!",
- "tools": [],
- "metadata": {}
- }
- ],
- "first_id": "asst_abc789",
- "last_id": "asst_abc789",
- "has_more": false
- }
- """;
-
- public const string ListMessagesPageMore =
- """
- {
- "object": "list",
- "data": [
- {
- "id": "msg_abc123",
- "object": "thread.message",
- "created_at": 1699016383,
- "thread_id": "thread_abc123",
- "role": "user",
- "content": [
- {
- "type": "text",
- "text": {
- "value": "How does AI work? Explain it in simple terms.",
- "annotations": []
- }
- }
- ],
- "file_ids": [],
- "assistant_id": null,
- "run_id": null,
- "metadata": {}
- },
- {
- "id": "msg_abc456",
- "object": "thread.message",
- "created_at": 1699016383,
- "thread_id": "thread_abc123",
- "role": "user",
- "content": [
- {
- "type": "text",
- "text": {
- "value": "Hello, what is AI?",
- "annotations": []
- }
- }
- ],
- "file_ids": [
- "file-abc123"
- ],
- "assistant_id": null,
- "run_id": null,
- "metadata": {}
- }
- ],
- "first_id": "msg_abc123",
- "last_id": "msg_abc456",
- "has_more": true
- }
- """;
-
- public const string ListMessagesPageFinal =
- """
- {
- "object": "list",
- "data": [
- {
- "id": "msg_abc789",
- "object": "thread.message",
- "created_at": 1699016383,
- "thread_id": "thread_abc123",
- "role": "user",
- "content": [
- {
- "type": "text",
- "text": {
- "value": "How does AI work? Explain it in simple terms.",
- "annotations": []
- }
- }
- ],
- "file_ids": [],
- "assistant_id": null,
- "run_id": null,
- "metadata": {}
- }
- ],
- "first_id": "msg_abc789",
- "last_id": "msg_abc789",
- "has_more": false
- }
- """;
- }
}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs
index 5c28373744a8..b0131ac9be6b 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs
@@ -98,6 +98,28 @@ public void VerifyOpenAIAssistantDefinitionAssignment()
ValidateSerialization(definition);
}
+ /// <summary>
+ /// Verify TemplateFactoryFormat.
+ /// </summary>
+ [Fact]
+ public void VerifyOpenAIAssistantDefinitionTemplateFactoryFormat()
+ {
+ // Arrange
+ OpenAIAssistantDefinition definition = new("testmodel");
+
+ // Assert
+ Assert.Null(definition.TemplateFactoryFormat);
+
+ // Act
+ definition = new("testmodel")
+ {
+ Metadata = new Dictionary<string, string>() { { OpenAIAssistantAgent.TemplateMetadataKey, "testformat" } }
+ };
+
+ // Assert
+ Assert.Equal("testformat", definition.TemplateFactoryFormat);
+ }
+
private static void ValidateSerialization(OpenAIAssistantDefinition source)
{
string json = JsonSerializer.Serialize(source);
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantResponseContent.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantResponseContent.cs
new file mode 100644
index 000000000000..dc99d150f09e
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantResponseContent.cs
@@ -0,0 +1,741 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Linq;
+using System.Net;
+using System.Net.Http;
+using System.Text;
+using System.Text.Json;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.OpenAI;
+
+/// <summary>
+/// Mock response payloads for <see cref="OpenAIAssistantAgent"/>.
+/// </summary>
+internal static class OpenAIAssistantResponseContent
+{
+ /// <summary>
+ /// Setup the response content for the <see cref="HttpMessageHandlerStub"/>.
+ /// </summary>
+ public static void SetupResponse(this HttpMessageHandlerStub messageHandlerStub, HttpStatusCode statusCode, string content)
+ {
+ messageHandlerStub.ResponseToReturn =
+ new HttpResponseMessage(statusCode)
+ {
+ Content = new StringContent(content)
+ };
+ }
+
+ /// <summary>
+ /// Setup the response content for the <see cref="HttpMessageHandlerStub"/>.
+ /// </summary>
+ public static void SetupResponses(this HttpMessageHandlerStub messageHandlerStub, HttpStatusCode statusCode, params string[] content)
+ {
+ foreach (var item in content)
+ {
+#pragma warning disable CA2000 // Dispose objects before losing scope
+ messageHandlerStub.ResponseQueue.Enqueue(
+ new(statusCode)
+ {
+ Content = new StringContent(item)
+ });
+#pragma warning restore CA2000 // Dispose objects before losing scope
+ }
+ }
+
+ private const string AssistantId = "asst_abc123";
+ private const string ThreadId = "thread_abc123";
+ private const string RunId = "run_abc123";
+ private const string MessageId = "msg_abc123";
+ private const string StepId = "step_abc123";
+
+ #region Assistant
+
+ /// <summary>
+ /// The response for creating or querying an assistant definition.
+ /// </summary>
+ public static string AssistantDefinition(OpenAIAssistantCapabilities capabilities, PromptTemplateConfig templateConfig) =>
+ AssistantDefinition(templateConfig.Name, templateConfig.Template, templateConfig.Description, capabilities);
+
+ /// <summary>
+ /// The response for creating or querying an assistant definition.
+ /// </summary>
+ public static string AssistantDefinition(OpenAIAssistantDefinition definition) =>
+ AssistantDefinition(definition.Name, definition.Instructions, definition.Description, definition);
+
+ /// <summary>
+ /// The response for creating or querying an assistant definition.
+ /// </summary>
+ public static string AssistantDefinition(
+ string? name,
+ string? instructions,
+ string? description,
+ OpenAIAssistantCapabilities capabilities)
+ {
+ StringBuilder builder = new();
+ builder.AppendLine("{");
+ builder.AppendLine(@$" ""id"": ""{AssistantId}"",");
+ builder.AppendLine(@" ""object"": ""assistant"",");
+ builder.AppendLine(@" ""created_at"": 1698984975,");
+ builder.AppendLine(@$" ""name"": ""{name}"",");
+ builder.AppendLine(@$" ""description"": ""{description}"",");
+ builder.AppendLine(@$" ""instructions"": ""{instructions}"",");
+ builder.AppendLine(@$" ""model"": ""{capabilities.ModelId}"",");
+
+ bool hasCodeInterpreter = capabilities.EnableCodeInterpreter;
+ bool hasCodeInterpreterFiles = (capabilities.CodeInterpreterFileIds?.Count ?? 0) > 0;
+ bool hasFileSearch = capabilities.EnableFileSearch;
+ if (!hasCodeInterpreter && !hasFileSearch)
+ {
+ builder.AppendLine(@" ""tools"": [],");
+ }
+ else
+ {
+ builder.AppendLine(@" ""tools"": [");
+
+ if (hasCodeInterpreter)
+ {
+ builder.Append(@$" {{ ""type"": ""code_interpreter"" }}{(hasFileSearch ? "," : string.Empty)}");
+ }
+
+ if (hasFileSearch)
+ {
+ builder.AppendLine(@" { ""type"": ""file_search"" }");
+ }
+
+ builder.AppendLine(" ],");
+ }
+
+ if (!hasCodeInterpreterFiles && !hasFileSearch)
+ {
+ builder.AppendLine(@" ""tool_resources"": {},");
+ }
+ else
+ {
+ builder.AppendLine(@" ""tool_resources"": {");
+
+ if (hasCodeInterpreterFiles)
+ {
+ string fileIds = string.Join(",", capabilities.CodeInterpreterFileIds!.Select(fileId => "\"" + fileId + "\""));
+ builder.AppendLine(@$" ""code_interpreter"": {{ ""file_ids"": [{fileIds}] }}{(hasFileSearch ? "," : string.Empty)}");
+ }
+
+ if (hasFileSearch)
+ {
+ builder.AppendLine(@$" ""file_search"": {{ ""vector_store_ids"": [""{capabilities.VectorStoreId}""] }}");
+ }
+
+ builder.AppendLine(" },");
+ }
+
+ if (capabilities.Temperature.HasValue)
+ {
+ builder.AppendLine(@$" ""temperature"": {capabilities.Temperature},");
+ }
+
+ if (capabilities.TopP.HasValue)
+ {
+ builder.AppendLine(@$" ""top_p"": {capabilities.TopP},");
+ }
+
+ bool hasExecutionOptions = capabilities.ExecutionOptions != null;
+ int metadataCount = (capabilities.Metadata?.Count ?? 0);
+ if (metadataCount == 0 && !hasExecutionOptions)
+ {
+ builder.AppendLine(@" ""metadata"": {}");
+ }
+ else
+ {
+ int index = 0;
+ builder.AppendLine(@" ""metadata"": {");
+
+ if (hasExecutionOptions)
+ {
+ string serializedExecutionOptions = JsonSerializer.Serialize(capabilities.ExecutionOptions);
+ builder.AppendLine(@$" ""{OpenAIAssistantAgent.OptionsMetadataKey}"": ""{JsonEncodedText.Encode(serializedExecutionOptions)}""{(metadataCount > 0 ? "," : string.Empty)}");
+ }
+
+ if (metadataCount > 0)
+ {
+ foreach (var (key, value) in capabilities.Metadata!)
+ {
+ builder.AppendLine(@$" ""{key}"": ""{value}""{(index < metadataCount - 1 ? "," : string.Empty)}");
+ ++index;
+ }
+ }
+
+ builder.AppendLine(" }");
+ }
+
+ builder.AppendLine("}");
+
+ return builder.ToString();
+ }
+
+ public const string DeleteAgent =
+ $$$"""
+ {
+ "id": "{{{AssistantId}}}",
+ "object": "assistant.deleted",
+ "deleted": true
+ }
+ """;
+
+ public const string CreateThread =
+ $$$"""
+ {
+ "id": "{{{ThreadId}}}",
+ "object": "thread",
+ "created_at": 1699012949,
+ "metadata": {}
+ }
+ """;
+
+ public const string DeleteThread =
+ $$$"""
+ {
+ "id": "{{{ThreadId}}}",
+ "object": "thread.deleted",
+ "deleted": true
+ }
+ """;
+
+ public const string ToolResponse = "{ }";
+
+ public const string GetImageMessage =
+ $$$"""
+ {
+ "id": "{{{MessageId}}}",
+ "object": "thread.message",
+ "created_at": 1699017614,
+ "thread_id": "{{{ThreadId}}}",
+ "role": "user",
+ "content": [
+ {
+ "type": "image_file",
+ "image_file": {
+ "file_id": "file_123"
+ }
+ }
+ ],
+ "assistant_id": "{{{AssistantId}}}",
+ "run_id": "{{{RunId}}}"
+ }
+ """;
+
+ public static string GetTextMessage(string text = "test") =>
+ $$$"""
+ {
+ "id": "{{{MessageId}}}",
+ "object": "thread.message",
+ "created_at": 1699017614,
+ "thread_id": "{{{ThreadId}}}",
+ "role": "user",
+ "content": [
+ {
+ "type": "text",
+ "text": {
+ "value": "{{{text}}}",
+ "annotations": []
+ }
+ }
+ ],
+ "assistant_id": "{{{AssistantId}}}",
+ "run_id": "{{{RunId}}}"
+ }
+ """;
+
+ public const string GetTextMessageWithAnnotation =
+ $$$"""
+ {
+ "id": "{{{MessageId}}}",
+ "object": "thread.message",
+ "created_at": 1699017614,
+ "thread_id": "{{{ThreadId}}}",
+ "role": "user",
+ "content": [
+ {
+ "type": "text",
+ "text": {
+ "value": "How does AI work? Explain it in simple terms.**f1",
+ "annotations": [
+ {
+ "type": "file_citation",
+ "text": "**f1",
+ "file_citation": {
+ "file_id": "file_123",
+ "quote": "does"
+ },
+ "start_index": 3,
+ "end_index": 6
+ }
+ ]
+ }
+ }
+ ],
+ "assistant_id": "{{{AssistantId}}}",
+ "run_id": "{{{RunId}}}"
+ }
+ """;
+
+ public const string ListAgentsPageMore =
+ $$$"""
+ {
+ "object": "list",
+ "data": [
+ {
+ "id": "{{{AssistantId}}}",
+ "object": "assistant",
+ "created_at": 1698982736,
+ "name": "Coding Tutor",
+ "description": null,
+ "model": "gpt-4-turbo",
+ "instructions": "You are a helpful assistant designed to make me better at coding!",
+ "tools": [],
+ "metadata": {}
+ },
+ {
+ "id": "asst_abc456",
+ "object": "assistant",
+ "created_at": 1698982718,
+ "name": "My Assistant",
+ "description": null,
+ "model": "gpt-4-turbo",
+ "instructions": "You are a helpful assistant designed to make me better at coding!",
+ "tools": [],
+ "metadata": {}
+ },
+ {
+ "id": "asst_abc789",
+ "object": "assistant",
+ "created_at": 1698982643,
+ "name": null,
+ "description": null,
+ "model": "gpt-4-turbo",
+ "instructions": null,
+ "tools": [],
+ "metadata": {}
+ }
+ ],
+ "first_id": "{{{AssistantId}}}",
+ "last_id": "asst_abc789",
+ "has_more": true
+ }
+ """;
+
+ public const string ListAgentsPageFinal =
+ """
+ {
+ "object": "list",
+ "data": [
+ {
+ "id": "asst_abc789",
+ "object": "assistant",
+ "created_at": 1698982736,
+ "name": "Coding Tutor",
+ "description": null,
+ "model": "gpt-4-turbo",
+ "instructions": "You are a helpful assistant designed to make me better at coding!",
+ "tools": [],
+ "metadata": {}
+ }
+ ],
+ "first_id": "asst_abc789",
+ "last_id": "asst_abc789",
+ "has_more": false
+ }
+ """;
+
+ public const string ListMessagesPageMore =
+ $$$"""
+ {
+ "object": "list",
+ "data": [
+ {
+ "id": "{{{MessageId}}}",
+ "object": "thread.message",
+ "created_at": 1699016383,
+ "thread_id": "{{{ThreadId}}}",
+ "role": "user",
+ "content": [
+ {
+ "type": "text",
+ "text": {
+ "value": "How does AI work? Explain it in simple terms.",
+ "annotations": []
+ }
+ }
+ ],
+ "file_ids": [],
+ "assistant_id": null,
+ "run_id": null,
+ "metadata": {}
+ },
+ {
+ "id": "msg_abc456",
+ "object": "thread.message",
+ "created_at": 1699016383,
+ "thread_id": "{{{ThreadId}}}",
+ "role": "user",
+ "content": [
+ {
+ "type": "text",
+ "text": {
+ "value": "Hello, what is AI?",
+ "annotations": []
+ }
+ }
+ ],
+ "file_ids": [
+ "file-abc123"
+ ],
+ "assistant_id": null,
+ "run_id": null,
+ "metadata": {}
+ }
+ ],
+ "first_id": "{{{MessageId}}}",
+ "last_id": "msg_abc456",
+ "has_more": true
+ }
+ """;
+
+ public const string ListMessagesPageFinal =
+ $$$"""
+ {
+ "object": "list",
+ "data": [
+ {
+ "id": "msg_abc789",
+ "object": "thread.message",
+ "created_at": 1699016383,
+ "thread_id": "{{{ThreadId}}}",
+ "role": "user",
+ "content": [
+ {
+ "type": "text",
+ "text": {
+ "value": "How does AI work? Explain it in simple terms.",
+ "annotations": []
+ }
+ }
+ ],
+ "file_ids": [],
+ "assistant_id": null,
+ "run_id": null,
+ "metadata": {}
+ }
+ ],
+ "first_id": "msg_abc789",
+ "last_id": "msg_abc789",
+ "has_more": false
+ }
+ """;
+
+ public const string UploadFile =
+ """
+ {
+ "id": "file-abc123",
+ "object": "file",
+ "bytes": 120000,
+ "created_at": 1677610602,
+ "filename": "test.txt",
+ "purpose": "assistant"
+ }
+ """;
+
+ #endregion
+
+ /// <summary>
+ /// Response payloads for a "regular" assistant run.
+ /// </summary>
+ public static class Run
+ {
+ public const string CreateRun =
+ $$$"""
+ {
+ "id": "{{{RunId}}}",
+ "object": "thread.run",
+ "created_at": 1699063290,
+ "assistant_id": "{{{AssistantId}}}",
+ "thread_id": "{{{ThreadId}}}",
+ "status": "queued",
+ "started_at": 1699063290,
+ "expires_at": null,
+ "cancelled_at": null,
+ "failed_at": null,
+ "completed_at": 1699063291,
+ "last_error": null,
+ "model": "gpt-4-turbo",
+ "instructions": null,
+ "tools": [],
+ "file_ids": [],
+ "metadata": {},
+ "usage": null,
+ "temperature": 1
+ }
+ """;
+
+ public const string PendingRun =
+ $$$"""
+ {
+ "id": "{{{RunId}}}",
+ "object": "thread.run",
+ "created_at": 1699063290,
+ "assistant_id": "{{{AssistantId}}}",
+ "thread_id": "{{{ThreadId}}}",
+ "status": "requires_action",
+ "started_at": 1699063290,
+ "expires_at": null,
+ "cancelled_at": null,
+ "failed_at": null,
+ "completed_at": 1699063291,
+ "last_error": null,
+ "model": "gpt-4-turbo",
+ "instructions": null,
+ "tools": [],
+ "file_ids": [],
+ "metadata": {},
+ "usage": null,
+ "temperature": 1
+ }
+ """;
+
+ public const string CompletedRun =
+ $$$"""
+ {
+ "id": "{{{RunId}}}",
+ "object": "thread.run",
+ "created_at": 1699063290,
+ "assistant_id": "{{{AssistantId}}}",
+ "thread_id": "{{{ThreadId}}}",
+ "status": "completed",
+ "started_at": 1699063290,
+ "expires_at": null,
+ "cancelled_at": null,
+ "failed_at": null,
+ "completed_at": 1699063291,
+ "last_error": null,
+ "model": "gpt-4-turbo",
+ "instructions": null,
+ "tools": [],
+ "file_ids": [],
+ "metadata": {},
+ "usage": null,
+ "temperature": 1
+ }
+ """;
+
+ public const string MessageSteps =
+ $$$"""
+ {
+ "object": "list",
+ "data": [
+ {
+ "id": "{{{StepId}}}",
+ "object": "thread.run.step",
+ "created_at": 1699063291,
+ "run_id": "{{{RunId}}}",
+ "assistant_id": "{{{AssistantId}}}",
+ "thread_id": "{{{ThreadId}}}",
+ "type": "message_creation",
+ "status": "completed",
+ "cancelled_at": null,
+ "completed_at": 1699063291,
+ "expired_at": null,
+ "failed_at": null,
+ "last_error": null,
+ "step_details": {
+ "type": "message_creation",
+ "message_creation": {
+ "message_id": "{{{MessageId}}}"
+ }
+ },
+ "usage": {
+ "prompt_tokens": 123,
+ "completion_tokens": 456,
+ "total_tokens": 579
+ }
+ }
+ ],
+ "first_id": "{{{StepId}}}",
+ "last_id": "step_abc456",
+ "has_more": false
+ }
+ """;
+
+ public const string ToolSteps =
+ $$$"""
+ {
+ "object": "list",
+ "data": [
+ {
+ "id": "step_abc987",
+ "object": "thread.run.step",
+ "created_at": 1699063291,
+ "run_id": "{{{RunId}}}",
+ "assistant_id": "{{{AssistantId}}}",
+ "thread_id": "{{{ThreadId}}}",
+ "type": "tool_calls",
+ "status": "in_progress",
+ "cancelled_at": null,
+ "completed_at": 1699063291,
+ "expired_at": null,
+ "failed_at": null,
+ "last_error": null,
+ "step_details": {
+ "type": "tool_calls",
+ "tool_calls": [
+ {
+ "id": "tool_1",
+ "type": "function",
+ "function": {
+ "name": "MyPlugin-MyFunction",
+ "arguments": "{ \"index\": 3 }",
+ "output": "test"
+ }
+ }
+ ]
+ },
+ "usage": {
+ "prompt_tokens": 123,
+ "completion_tokens": 456,
+ "total_tokens": 579
+ }
+ }
+ ],
+ "first_id": "{{{StepId}}}",
+ "last_id": "step_abc456",
+ "has_more": false
+ }
+ """;
+ }
+
+ /// <summary>
+ /// Response payloads for a streaming assistant run.
+ /// </summary>
+ public static class Streaming
+ {
+ public static string Response(params string[] eventPayloads)
+ {
+ StringBuilder builder = new();
+
+ foreach (string payload in eventPayloads)
+ {
+ builder.Append(payload);
+ builder.AppendLine();
+ builder.AppendLine();
+ }
+
+ return builder.ToString();
+ }
+
+ public const string Done =
+ """
+ event: thread.done
+ data: [DONE]
+ """;
+
+ public static string CreateRun(string eventType)
+ {
+ int? createdAt = null;
+ int? startedAt = null;
+ int? completedAt = null;
+ int? expiresAt = null;
+ string? status = null;
+
+ switch (eventType)
+ {
+ case "created":
+ status = "created";
+ createdAt = 1725978974;
+ break;
+ case "queued":
+ status = "queued";
+ createdAt = 1725978974;
+ break;
+ case "in_progress":
+ status = "in_progress";
+ createdAt = 1725978974;
+ startedAt = 1725978975;
+ expiresAt = 1725979576;
+ break;
+ case "completed":
+ status = "completed";
+ createdAt = 1725978974;
+ startedAt = 1725978975;
+ expiresAt = 1725979576;
+ completedAt = 1725978976;
+ break;
+ }
+
+ Assert.NotNull(status);
+
+ return
+ CreateEvent(
+ $"thread.run.{eventType}",
+ $$$"""
+ {
+ "id": "{{{RunId}}}",
+ "object": "thread.run",
+ "assistant_id": "{{{AssistantId}}}",
+ "thread_id": "{{{ThreadId}}}",
+ "status": "{{{status}}}",
+ "created_at": {{{ParseTimestamp(createdAt)}}},
+ "started_at": {{{ParseTimestamp(startedAt)}}},
+ "expires_at": {{{ParseTimestamp(expiresAt)}}},
+ "completed_at": {{{ParseTimestamp(completedAt)}}},
+ "required_action": null,
+ "model": "gpt-4o-mini",
+ "instructions": "test",
+ "tools": [],
+ "metadata": {},
+ "temperature": 1.0,
+ "top_p": 1.0,
+ "truncation_strategy": { "type": "auto" },
+ "incomplete_details": null,
+ "usage": null,
+ "response_format": "auto",
+ "tool_choice": "auto",
+ "parallel_tool_calls": true
+ }
+ """);
+ }
+
+ public static string DeltaMessage(string text) =>
+ CreateEvent(
+ "thread.message.delta",
+ $$$"""
+ {
+ "id": "{{{MessageId}}}",
+ "object": "thread.message.delta",
+ "delta": {
+ "content": [
+ {
+ "index": 0,
+ "type": "text",
+ "text": { "value": "{{{text}}}", "annotations": [] }
+ }
+ ]
+ }
+ }
+ """);
+
+ private static string ParseTimestamp(int? timestamp)
+ {
+ if (timestamp.HasValue)
+ {
+ return timestamp.Value.ToString();
+ }
+
+ return "0";
+ }
+
+ private static string CreateEvent(string eventType, string data) =>
+ $"""
+ event: {eventType}
+ data: {data.Replace("\n", string.Empty).Replace("\r", string.Empty)}
+ """;
+ }
+}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs
index 7799eb26c305..b9cfcf55a5b1 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs
@@ -4,6 +4,7 @@
using Azure.Core;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Moq;
+using OpenAI;
using Xunit;
namespace SemanticKernel.Agents.UnitTests.OpenAI;
@@ -17,9 +18,9 @@ public class OpenAIClientProviderTests
/// Verify that provisioning of client for Azure OpenAI.
///
[Fact]
- public void VerifyOpenAIClientFactoryTargetAzureByKey()
+ public void VerifyOpenAIClientProviderTargetAzureByKey()
{
- // Arrange
+ // Act
OpenAIClientProvider provider = OpenAIClientProvider.ForAzureOpenAI("key", new Uri("https://localhost"));
// Assert
@@ -30,10 +31,12 @@ public void VerifyOpenAIClientFactoryTargetAzureByKey()
/// Verify that provisioning of client for Azure OpenAI.
///
[Fact]
- public void VerifyOpenAIClientFactoryTargetAzureByCredential()
+ public void VerifyOpenAIClientProviderTargetAzureByCredential()
{
// Arrange
Mock<TokenCredential> mockCredential = new();
+
+ // Act
OpenAIClientProvider provider = OpenAIClientProvider.ForAzureOpenAI(mockCredential.Object, new Uri("https://localhost"));
// Assert
@@ -46,9 +49,9 @@ public void VerifyOpenAIClientFactoryTargetAzureByCredential()
[Theory]
[InlineData(null)]
[InlineData("http://myproxy:9819")]
- public void VerifyOpenAIClientFactoryTargetOpenAINoKey(string? endpoint)
+ public void VerifyOpenAIClientProviderTargetOpenAINoKey(string? endpoint)
{
- // Arrange
+ // Act
OpenAIClientProvider provider = OpenAIClientProvider.ForOpenAI(endpoint != null ? new Uri(endpoint) : null);
// Assert
@@ -61,9 +64,9 @@ public void VerifyOpenAIClientFactoryTargetOpenAINoKey(string? endpoint)
[Theory]
[InlineData("key", null)]
[InlineData("key", "http://myproxy:9819")]
- public void VerifyOpenAIClientFactoryTargetOpenAIByKey(string key, string? endpoint)
+ public void VerifyOpenAIClientProviderTargetOpenAIByKey(string key, string? endpoint)
{
- // Arrange
+ // Act
OpenAIClientProvider provider = OpenAIClientProvider.ForOpenAI(key, endpoint != null ? new Uri(endpoint) : null);
// Assert
@@ -74,13 +77,59 @@ public void VerifyOpenAIClientFactoryTargetOpenAIByKey(string key, string? endpo
/// Verify that the factory can create a client with http proxy.
///
[Fact]
- public void VerifyOpenAIClientFactoryWithHttpClient()
+ public void VerifyOpenAIClientProviderWithHttpClient()
{
// Arrange
using HttpClient httpClient = new() { BaseAddress = new Uri("http://myproxy:9819") };
+
+ // Act
OpenAIClientProvider provider = OpenAIClientProvider.ForOpenAI(httpClient: httpClient);
// Assert
Assert.NotNull(provider.Client);
+
+ // Arrange
+ using HttpClient httpClientWithHeaders = new() { BaseAddress = new Uri("http://myproxy:9819") };
+ httpClientWithHeaders.DefaultRequestHeaders.Add("X-Test", "Test");
+
+ // Act
+ OpenAIClientProvider providerWithHeaders = OpenAIClientProvider.ForOpenAI(httpClient: httpClientWithHeaders);
+
+ // Assert
+ Assert.NotNull(providerWithHeaders.Client);
+
+ Assert.NotEqual(provider.ConfigurationKeys.Count, providerWithHeaders.ConfigurationKeys.Count);
+ }
+
+ /// <summary>
+ /// Verify that the factory can create a client with custom http headers.
+ /// </summary>
+ [Fact]
+ public void VerifyOpenAIClientProviderWithHttpClientHeaders()
+ {
+ // Arrange
+ using HttpClient httpClient = new() { BaseAddress = new Uri("http://myproxy:9819") };
+ httpClient.DefaultRequestHeaders.Add("X-Test", "Test");
+
+ // Act
+ OpenAIClientProvider provider = OpenAIClientProvider.ForOpenAI(httpClient: httpClient);
+
+ // Assert
+ Assert.NotNull(provider.Client);
+ }
+
+ /// <summary>
+ /// Verify that the factory can accept a <see cref="OpenAIClient"/> that already exists.
+ /// </summary>
+ [Fact]
+ public void VerifyOpenAIClientProviderFromClient()
+ {
+ // Arrange
+ Mock<OpenAIClient> mockClient = new();
+ OpenAIClientProvider provider = OpenAIClientProvider.FromClient(mockClient.Object);
+
+ // Assert
+ Assert.NotNull(provider.Client);
+ Assert.Equal(mockClient.Object, provider.Client);
}
}
diff --git a/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs b/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs
index 94e9b6c34eaf..989310838ff8 100644
--- a/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs
+++ b/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs
@@ -93,12 +93,12 @@ private async Task VerifyAgentExecutionAsync(
// Assistant doesn't need plug-in since it has access to the shared function result.
OpenAIAssistantAgent assistantAgent =
await OpenAIAssistantAgent.CreateAsync(
- kernel: new(),
config,
new(modelName)
{
Instructions = "Answer questions about the menu."
- });
+ },
+ new Kernel());
// Act & Assert
try
diff --git a/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs b/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs
index e2d1ef2b1bfe..b96f325e1ed3 100644
--- a/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs
+++ b/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs
@@ -113,12 +113,12 @@ private async Task ExecuteAgentAsync(
OpenAIAssistantAgent agent =
await OpenAIAssistantAgent.CreateAsync(
- kernel,
config,
new(modelName)
{
Instructions = "Answer questions about the menu.",
- });
+ },
+ kernel);
try
{
@@ -155,12 +155,12 @@ private async Task ExecuteStreamingAgentAsync(
OpenAIAssistantAgent agent =
await OpenAIAssistantAgent.CreateAsync(
- kernel,
config,
new(modelName)
{
Instructions = "Answer questions about the menu.",
- });
+ },
+ kernel);
AgentGroupChat chat = new();
chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input));