Commit 8c1f00d

Merge branch 'main' into copilot/fix-message-ordering-issue
2 parents: 9f28cfc + f432610

File tree: 7 files changed (+221 -7 lines)

Lines changed: 31 additions & 0 deletions
@@ -0,0 +1,31 @@
+{
+  "solution": {
+    "path": "agent-framework-dotnet.slnx",
+    "projects": [
+      "src\\Microsoft.Agents.AI.A2A\\Microsoft.Agents.AI.A2A.csproj",
+      "src\\Microsoft.Agents.AI.Abstractions\\Microsoft.Agents.AI.Abstractions.csproj",
+      "src\\Microsoft.Agents.AI.AGUI\\Microsoft.Agents.AI.AGUI.csproj",
+      "src\\Microsoft.Agents.AI.Anthropic\\Microsoft.Agents.AI.Anthropic.csproj",
+      "src\\Microsoft.Agents.AI.AzureAI.Persistent\\Microsoft.Agents.AI.AzureAI.Persistent.csproj",
+      "src\\Microsoft.Agents.AI.AzureAI\\Microsoft.Agents.AI.AzureAI.csproj",
+      "src\\Microsoft.Agents.AI.CopilotStudio\\Microsoft.Agents.AI.CopilotStudio.csproj",
+      "src\\Microsoft.Agents.AI.CosmosNoSql\\Microsoft.Agents.AI.CosmosNoSql.csproj",
+      "src\\Microsoft.Agents.AI.Declarative\\Microsoft.Agents.AI.Declarative.csproj",
+      "src\\Microsoft.Agents.AI.DevUI\\Microsoft.Agents.AI.DevUI.csproj",
+      "src\\Microsoft.Agents.AI.DurableTask\\Microsoft.Agents.AI.DurableTask.csproj",
+      "src\\Microsoft.Agents.AI.Hosting.A2A.AspNetCore\\Microsoft.Agents.AI.Hosting.A2A.AspNetCore.csproj",
+      "src\\Microsoft.Agents.AI.Hosting.A2A\\Microsoft.Agents.AI.Hosting.A2A.csproj",
+      "src\\Microsoft.Agents.AI.Hosting.AGUI.AspNetCore\\Microsoft.Agents.AI.Hosting.AGUI.AspNetCore.csproj",
+      "src\\Microsoft.Agents.AI.Hosting.AzureFunctions\\Microsoft.Agents.AI.Hosting.AzureFunctions.csproj",
+      "src\\Microsoft.Agents.AI.Hosting.OpenAI\\Microsoft.Agents.AI.Hosting.OpenAI.csproj",
+      "src\\Microsoft.Agents.AI.Hosting\\Microsoft.Agents.AI.Hosting.csproj",
+      "src\\Microsoft.Agents.AI.Mem0\\Microsoft.Agents.AI.Mem0.csproj",
+      "src\\Microsoft.Agents.AI.OpenAI\\Microsoft.Agents.AI.OpenAI.csproj",
+      "src\\Microsoft.Agents.AI.Purview\\Microsoft.Agents.AI.Purview.csproj",
+      "src\\Microsoft.Agents.AI.Workflows.Declarative.AzureAI\\Microsoft.Agents.AI.Workflows.Declarative.AzureAI.csproj",
+      "src\\Microsoft.Agents.AI.Workflows.Declarative\\Microsoft.Agents.AI.Workflows.Declarative.csproj",
+      "src\\Microsoft.Agents.AI.Workflows\\Microsoft.Agents.AI.Workflows.csproj",
+      "src\\Microsoft.Agents.AI\\Microsoft.Agents.AI.csproj"
+    ]
+  }
+}
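
The new file matches the shape of a Visual Studio solution filter: a solution path plus an explicit allow-list of projects. As an illustration only (the file's actual name and consumer are not shown in this diff), such a filter can be inspected with a few lines of Python:

    import json

    # Hypothetical file name; the diff does not show what this new JSON file is called.
    with open("agent-framework-dotnet.slnf", encoding="utf-8") as f:
        filter_spec = json.load(f)

    print("Solution:", filter_spec["solution"]["path"])
    for project in filter_spec["solution"]["projects"]:
        print("Included project:", project)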

dotnet/src/Microsoft.Agents.AI.Hosting.OpenAI/Responses/AIAgentResponseExecutor.cs

Lines changed: 7 additions & 1 deletion
@@ -36,7 +36,13 @@ public async IAsyncEnumerable<StreamingResponseEvent> ExecuteAsync(
     // Create options with properties from the request
     var chatOptions = new ChatOptions
     {
-        ConversationId = request.Conversation?.Id,
+        // Note: We intentionally do NOT set ConversationId on ChatOptions here.
+        // The conversation ID from the client request is used by the hosting layer
+        // to manage conversation storage, but should not be forwarded to the underlying
+        // IChatClient as it has its own concept of conversations (or none at all).
+        // ---
+        // ConversationId = request.Conversation?.Id,
+
         Temperature = (float?)request.Temperature,
         TopP = (float?)request.TopP,
         MaxOutputTokens = request.MaxOutputTokens,

dotnet/src/Microsoft.Agents.AI.Hosting.OpenAI/Responses/HostedAgentResponseExecutor.cs

Lines changed: 8 additions & 1 deletion
@@ -82,9 +82,16 @@ public async IAsyncEnumerable<StreamingResponseEvent> ExecuteAsync(
 {
     string agentName = GetAgentName(request)!;
     AIAgent agent = this._serviceProvider.GetRequiredKeyedService<AIAgent>(agentName);
+
     var chatOptions = new ChatOptions
     {
-        ConversationId = request.Conversation?.Id,
+        // Note: We intentionally do NOT set ConversationId on ChatOptions here.
+        // The conversation ID from the client request is used by the hosting layer
+        // to manage conversation storage, but should not be forwarded to the underlying
+        // IChatClient as it has its own concept of conversations (or none at all).
+        // ---
+        // ConversationId = request.Conversation?.Id,
+
         Temperature = (float?)request.Temperature,
         TopP = (float?)request.TopP,
         MaxOutputTokens = request.MaxOutputTokens,

dotnet/tests/Microsoft.Agents.AI.Hosting.OpenAI.UnitTests/OpenAIResponsesIntegrationTests.cs

Lines changed: 147 additions & 0 deletions
@@ -1091,6 +1091,116 @@ public async Task CreateResponseStreaming_ContentPartAdded_IncludesEventAsync()
     Assert.Contains(updates, u => u is StreamingResponseContentPartAddedUpdate);
 }

+/// <summary>
+/// Verifies that when a client provides a conversation ID, the underlying IChatClient
+/// does NOT receive that conversation ID via ChatOptions.ConversationId.
+/// This ensures that the host's conversation management is separate from the IChatClient's
+/// conversation handling (if any).
+/// </summary>
+[Fact]
+public async Task CreateResponse_WithConversationId_DoesNotForwardConversationIdToIChatClientAsync()
+{
+    // Arrange
+    const string AgentName = "conversation-id-agent";
+    const string Instructions = "You are a helpful assistant.";
+    const string ExpectedResponse = "Response";
+
+    this._httpClient = await this.CreateTestServerWithConversationsAsync(AgentName, Instructions, ExpectedResponse);
+    var mockChatClient = this.ResolveMockChatClient();
+
+    // First, create a conversation
+    var createConversationRequest = new { metadata = new { agent_id = AgentName } };
+    string createConvJson = System.Text.Json.JsonSerializer.Serialize(createConversationRequest);
+    using StringContent createConvContent = new(createConvJson, Encoding.UTF8, "application/json");
+    HttpResponseMessage createConvResponse = await this._httpClient.PostAsync(
+        new Uri("/v1/conversations", UriKind.Relative),
+        createConvContent);
+    Assert.True(createConvResponse.IsSuccessStatusCode, $"Create conversation failed: {createConvResponse.StatusCode}");
+
+    string convResponseJson = await createConvResponse.Content.ReadAsStringAsync();
+    using var convDoc = System.Text.Json.JsonDocument.Parse(convResponseJson);
+    string conversationId = convDoc.RootElement.GetProperty("id").GetString()!;
+
+    // Act - Send request with conversation ID using raw HTTP
+    // (OpenAI SDK doesn't expose ConversationId directly on ResponseCreationOptions)
+    var requestBody = new
+    {
+        input = "Test",
+        agent = new { name = AgentName },
+        conversation = conversationId,
+        stream = false
+    };
+    string requestJson = System.Text.Json.JsonSerializer.Serialize(requestBody);
+    using StringContent content = new(requestJson, Encoding.UTF8, "application/json");
+    HttpResponseMessage httpResponse = await this._httpClient.PostAsync(
+        new Uri($"/{AgentName}/v1/responses", UriKind.Relative),
+        content);
+
+    // Assert - Response is successful
+    Assert.True(httpResponse.IsSuccessStatusCode, $"Response status: {httpResponse.StatusCode}");
+
+    // Assert - The IChatClient should have received ChatOptions, but without the ConversationId set
+    Assert.NotNull(mockChatClient.LastChatOptions);
+    Assert.Null(mockChatClient.LastChatOptions.ConversationId);
+}
+
+/// <summary>
+/// Verifies that when a client provides a conversation ID in streaming mode, the underlying
+/// IChatClient does NOT receive that conversation ID via ChatOptions.ConversationId.
+/// </summary>
+[Fact]
+public async Task CreateResponseStreaming_WithConversationId_DoesNotForwardConversationIdToIChatClientAsync()
+{
+    // Arrange
+    const string AgentName = "conversation-streaming-agent";
+    const string Instructions = "You are a helpful assistant.";
+    const string ExpectedResponse = "Streaming response";
+
+    this._httpClient = await this.CreateTestServerWithConversationsAsync(AgentName, Instructions, ExpectedResponse);
+    var mockChatClient = this.ResolveMockChatClient();
+
+    // First, create a conversation
+    var createConversationRequest = new { metadata = new { agent_id = AgentName } };
+    string createConvJson = System.Text.Json.JsonSerializer.Serialize(createConversationRequest);
+    using StringContent createConvContent = new(createConvJson, Encoding.UTF8, "application/json");
+    HttpResponseMessage createConvResponse = await this._httpClient.PostAsync(
+        new Uri("/v1/conversations", UriKind.Relative),
+        createConvContent);
+    Assert.True(createConvResponse.IsSuccessStatusCode, $"Create conversation failed: {createConvResponse.StatusCode}");
+
+    string convResponseJson = await createConvResponse.Content.ReadAsStringAsync();
+    using var convDoc = System.Text.Json.JsonDocument.Parse(convResponseJson);
+    string conversationId = convDoc.RootElement.GetProperty("id").GetString()!;
+
+    // Act - Send streaming request with conversation ID using raw HTTP
+    var requestBody = new
+    {
+        input = "Test",
+        agent = new { name = AgentName },
+        conversation = conversationId,
+        stream = true
+    };
+    string requestJson = System.Text.Json.JsonSerializer.Serialize(requestBody);
+    using StringContent content = new(requestJson, Encoding.UTF8, "application/json");
+    HttpResponseMessage httpResponse = await this._httpClient.PostAsync(
+        new Uri($"/{AgentName}/v1/responses", UriKind.Relative),
+        content);
+
+    // Assert - Response is successful and is SSE
+    Assert.True(httpResponse.IsSuccessStatusCode, $"Response status: {httpResponse.StatusCode}");
+    Assert.Equal("text/event-stream", httpResponse.Content.Headers.ContentType?.MediaType);
+
+    // Consume the SSE stream to complete the request
+    string sseContent = await httpResponse.Content.ReadAsStringAsync();
+
+    // Verify streaming completed successfully by checking for response.completed event
+    Assert.Contains("response.completed", sseContent);
+
+    // Assert - The IChatClient should have received ChatOptions, but without the ConversationId set
+    Assert.NotNull(mockChatClient.LastChatOptions);
+    Assert.Null(mockChatClient.LastChatOptions.ConversationId);
+}
+
 private OpenAIResponseClient CreateResponseClient(string agentName)
 {
     return new OpenAIResponseClient(
@@ -1103,6 +1213,19 @@ private OpenAIResponseClient CreateResponseClient(string agentName)
     });
 }

+private TestHelpers.SimpleMockChatClient ResolveMockChatClient()
+{
+    ArgumentNullException.ThrowIfNull(this._app, nameof(this._app));
+
+    var chatClient = this._app.Services.GetRequiredKeyedService<IChatClient>("chat-client");
+    if (chatClient is not TestHelpers.SimpleMockChatClient mockChatClient)
+    {
+        throw new InvalidOperationException("Mock chat client not found or of incorrect type.");
+    }
+
+    return mockChatClient;
+}
+
 private async Task<HttpClient> CreateTestServerAsync(string agentName, string instructions, string responseText = "Test response")
 {
     WebApplicationBuilder builder = WebApplication.CreateBuilder();
@@ -1125,6 +1248,30 @@ private async Task<HttpClient> CreateTestServerAsync(string agentName, string in
     return testServer.CreateClient();
 }

+private async Task<HttpClient> CreateTestServerWithConversationsAsync(string agentName, string instructions, string responseText = "Test response")
+{
+    WebApplicationBuilder builder = WebApplication.CreateBuilder();
+    builder.WebHost.UseTestServer();
+
+    IChatClient mockChatClient = new TestHelpers.SimpleMockChatClient(responseText);
+    builder.Services.AddKeyedSingleton("chat-client", mockChatClient);
+    builder.AddOpenAIResponses();
+    builder.AddOpenAIConversations();
+    builder.AddAIAgent(agentName, instructions, chatClientServiceKey: "chat-client");
+
+    this._app = builder.Build();
+    AIAgent agent = this._app.Services.GetRequiredKeyedService<AIAgent>(agentName);
+    this._app.MapOpenAIResponses(agent);
+    this._app.MapOpenAIConversations();
+
+    await this._app.StartAsync();
+
+    TestServer testServer = this._app.Services.GetRequiredService<IServer>() as TestServer
+        ?? throw new InvalidOperationException("TestServer not found");
+
+    return testServer.CreateClient();
+}
+
 private async Task<HttpClient> CreateTestServerWithCustomClientAsync(string agentName, string instructions, IChatClient chatClient)
 {
     WebApplicationBuilder builder = WebApplication.CreateBuilder();

dotnet/tests/Microsoft.Agents.AI.Hosting.OpenAI.UnitTests/TestHelpers.cs

Lines changed: 12 additions & 0 deletions
@@ -19,6 +19,8 @@ internal sealed class SimpleMockChatClient : IChatClient
 {
     private readonly string _responseText;

+    public ChatOptions? LastChatOptions { get; private set; }
+
     public SimpleMockChatClient(string responseText = "Test response")
     {
         this._responseText = responseText;
@@ -31,6 +33,11 @@ public Task<ChatResponse> GetResponseAsync(
         ChatOptions? options = null,
         CancellationToken cancellationToken = default)
     {
+        if (options is not null)
+        {
+            this.LastChatOptions = options;
+        }
+
         // Count input messages to simulate context size
         int messageCount = messages.Count();
         ChatMessage message = new(ChatRole.Assistant, this._responseText);
@@ -53,6 +60,11 @@ public async IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(
         ChatOptions? options = null,
         [EnumeratorCancellation] CancellationToken cancellationToken = default)
     {
+        if (options is not null)
+        {
+            this.LastChatOptions = options;
+        }
+
         await Task.Delay(1, cancellationToken);

         // Count input messages to simulate context size

python/packages/core/agent_framework/_types.py

Lines changed: 15 additions & 4 deletions
@@ -2125,20 +2125,31 @@ def text(self) -> str:
     return " ".join(content.text for content in self.contents if isinstance(content, TextContent))


-def prepare_messages(messages: str | ChatMessage | list[str] | list[ChatMessage]) -> list[ChatMessage]:
+def prepare_messages(
+    messages: str | ChatMessage | list[str] | list[ChatMessage], system_instructions: str | list[str] | None = None
+) -> list[ChatMessage]:
     """Convert various message input formats into a list of ChatMessage objects.

     Args:
         messages: The input messages in various supported formats.
+        system_instructions: The system instructions. They will be inserted to the start of the messages list.

     Returns:
         A list of ChatMessage objects.
     """
+    if system_instructions is not None:
+        if isinstance(system_instructions, str):
+            system_instructions = [system_instructions]
+        system_instruction_messages = [ChatMessage(role="system", text=instr) for instr in system_instructions]
+    else:
+        system_instruction_messages = []
+
     if isinstance(messages, str):
-        return [ChatMessage(role="user", text=messages)]
+        return [*system_instruction_messages, ChatMessage(role="user", text=messages)]
     if isinstance(messages, ChatMessage):
-        return [messages]
-    return_messages: list[ChatMessage] = []
+        return [*system_instruction_messages, messages]
+
+    return_messages: list[ChatMessage] = system_instruction_messages
     for msg in messages:
         if isinstance(msg, str):
             msg = ChatMessage(role="user", text=msg)
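
With this change, callers can pass system instructions alongside the messages and have them emitted as leading system-role messages. A minimal usage sketch, assuming prepare_messages and ChatMessage are imported from the agent_framework core package (the exact public import path is not shown in this diff):

    from agent_framework._types import ChatMessage, prepare_messages

    # The system instructions become the first, system-role message,
    # followed by the user message built from the plain string.
    messages = prepare_messages(
        "What's the weather like today?",
        system_instructions="You are a helpful assistant.",
    )
    for message in messages:
        print(message.role, message.text)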

python/packages/core/agent_framework/observability.py

Lines changed: 1 addition & 1 deletion
@@ -1422,7 +1422,7 @@ def _capture_messages(
     """Log messages with extra information."""
     from ._types import prepare_messages

-    prepped = prepare_messages(messages)
+    prepped = prepare_messages(messages, system_instructions=system_instructions)
     otel_messages: list[dict[str, Any]] = []
     for index, message in enumerate(prepped):
         otel_messages.append(_to_otel_message(message))
