autogen/dotnet/test/AutoGen.Tests/OpenAIMessageTests.cs

// Copyright (c) Microsoft Corporation. All rights reserved.
// OpenAIMessageTests.cs

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using ApprovalTests;
using ApprovalTests.Namers;
using ApprovalTests.Reporters;
using AutoGen.OpenAI;
using Azure.AI.OpenAI;
using FluentAssertions;
using Xunit;

namespace AutoGen.Tests;
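
// Tests for OpenAIChatRequestMessageConnector, the middleware that converts AutoGen IMessage
// instances into the Azure.AI.OpenAI ChatRequestMessage types expected by the OpenAI chat API.
// A minimal usage sketch of the API exercised below (agent and autoGenMessages are placeholders):
//
//   var connector = new OpenAIChatRequestMessageConnector();
//   IEnumerable<ChatRequestMessage> oaiMessages = connector.ProcessIncomingMessages(agent, autoGenMessages);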
public class OpenAIMessageTests
{
    private readonly JsonSerializerOptions jsonSerializerOptions = new JsonSerializerOptions
    {
        WriteIndented = true,
        IgnoreReadOnlyProperties = false,
    };
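
    // Converts one instance of every supported message type through the connector and verifies the
    // serialized result against the approved snapshot in the ApprovalTests subdirectory.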
    [Fact]
    [UseReporter(typeof(DiffReporter))]
    [UseApprovalSubdirectory("ApprovalTests")]
    public void BasicMessageTest()
    {
        IMessage[] messages = [
            new TextMessage(Role.System, "You are a helpful AI assistant"),
            new TextMessage(Role.User, "Hello", "user"),
            new TextMessage(Role.Assistant, "How can I help you?", from: "assistant"),
            new Message(Role.System, "You are a helpful AI assistant"),
            new Message(Role.User, "Hello", "user"),
            new Message(Role.Assistant, "How can I help you?", from: "assistant"),
            new Message(Role.Function, "result", "user"),
            new Message(Role.Assistant, null, "assistant")
            {
                FunctionName = "functionName",
                FunctionArguments = "functionArguments",
            },
            new ImageMessage(Role.User, "https://example.com/image.png", "user"),
            new MultiModalMessage(Role.Assistant,
                [
                    new TextMessage(Role.User, "Hello", "user"),
                    new ImageMessage(Role.User, "https://example.com/image.png", "user"),
                ], "user"),
            new ToolCallMessage("test", "test", "assistant"),
            new ToolCallResultMessage("result", "test", "test", "user"),
            new ToolCallResultMessage(
                [
                    new ToolCall("result", "test", "test"),
                    new ToolCall("result", "test", "test"),
                ], "user"),
            new ToolCallMessage(
                [
                    new ToolCall("test", "test"),
                    new ToolCall("test", "test"),
                ], "assistant"),
            new AggregateMessage<ToolCallMessage, ToolCallResultMessage>(
                message1: new ToolCallMessage("test", "test", "assistant"),
                message2: new ToolCallResultMessage("result", "test", "test", "assistant"), "assistant"),
        ];

        var openaiMessageConnectorMiddleware = new OpenAIChatRequestMessageConnector();
        var agent = new EchoAgent("assistant");

        var oaiMessages = messages.Select(m => (m, openaiMessageConnectorMiddleware.ProcessIncomingMessages(agent, [m])));
        VerifyOAIMessages(oaiMessages);
    }
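
    // Checks, case by case, which ChatRequestMessage subtype each AutoGen message maps to and how
    // its content, multimodal items, and tool calls are populated.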
    [Fact]
    public void ToOpenAIChatRequestMessageTest()
    {
        var agent = new EchoAgent("assistant");
        var middleware = new OpenAIChatRequestMessageConnector();

        // user message
        IMessage message = new TextMessage(Role.User, "Hello", "user");
        var oaiMessages = middleware.ProcessIncomingMessages(agent, [message]);
        oaiMessages.Count().Should().Be(1);
        oaiMessages.First().Should().BeOfType<ChatRequestUserMessage>();
        var userMessage = (ChatRequestUserMessage)oaiMessages.First();
        userMessage.Content.Should().Be("Hello");

        // user message test 2
        // even though the Role is Assistant, the message is converted to a user message because it comes from "user"
        message = new TextMessage(Role.Assistant, "Hello", "user");
        oaiMessages = middleware.ProcessIncomingMessages(agent, [message]);
        oaiMessages.Count().Should().Be(1);
        oaiMessages.First().Should().BeOfType<ChatRequestUserMessage>();
        userMessage = (ChatRequestUserMessage)oaiMessages.First();
        userMessage.Content.Should().Be("Hello");

        // user message with multimodal content
        // image
        message = new ImageMessage(Role.User, "https://example.com/image.png", "user");
        oaiMessages = middleware.ProcessIncomingMessages(agent, [message]);
        oaiMessages.Count().Should().Be(1);
        oaiMessages.First().Should().BeOfType<ChatRequestUserMessage>();
        userMessage = (ChatRequestUserMessage)oaiMessages.First();
        userMessage.Content.Should().BeNullOrEmpty();
        userMessage.MultimodalContentItems.Count().Should().Be(1);
        userMessage.MultimodalContentItems.First().Should().BeOfType<ChatMessageImageContentItem>();

        // text and image
        message = new MultiModalMessage(
            Role.User,
            [
                new TextMessage(Role.User, "Hello", "user"),
                new ImageMessage(Role.User, "https://example.com/image.png", "user"),
            ], "user");
        oaiMessages = middleware.ProcessIncomingMessages(agent, [message]);
        oaiMessages.Count().Should().Be(1);
        oaiMessages.First().Should().BeOfType<ChatRequestUserMessage>();
        userMessage = (ChatRequestUserMessage)oaiMessages.First();
        userMessage.Content.Should().BeNullOrEmpty();
        userMessage.MultimodalContentItems.Count().Should().Be(2);
        userMessage.MultimodalContentItems.First().Should().BeOfType<ChatMessageTextContentItem>();

        // assistant text message
        message = new TextMessage(Role.Assistant, "How can I help you?", "assistant");
        oaiMessages = middleware.ProcessIncomingMessages(agent, [message]);
        oaiMessages.Count().Should().Be(1);
        oaiMessages.First().Should().BeOfType<ChatRequestAssistantMessage>();
        var assistantMessage = (ChatRequestAssistantMessage)oaiMessages.First();
        assistantMessage.Content.Should().Be("How can I help you?");

        // assistant message with a single tool call
        message = new ToolCallMessage("test", "test", "assistant");
        oaiMessages = middleware.ProcessIncomingMessages(agent, [message]);
        oaiMessages.Count().Should().Be(1);
        oaiMessages.First().Should().BeOfType<ChatRequestAssistantMessage>();
        assistantMessage = (ChatRequestAssistantMessage)oaiMessages.First();
        assistantMessage.Content.Should().BeNullOrEmpty();
        assistantMessage.ToolCalls.Count().Should().Be(1);
        assistantMessage.ToolCalls.First().Should().BeOfType<ChatCompletionsFunctionToolCall>();

        // the user is not supposed to send a tool call message
        message = new ToolCallMessage("test", "test", "user");
        Func<ChatRequestMessage> action = () => middleware.ProcessIncomingMessages(agent, [message]).First();
        action.Should().Throw<ArgumentException>().WithMessage("ToolCallMessage is not supported when message.From is not the same with agent");

        // assistant message with multiple tool calls
        message = new ToolCallMessage(
            toolCalls:
            [
                new ToolCall("test", "test"),
                new ToolCall("test", "test"),
            ], "assistant");
        oaiMessages = middleware.ProcessIncomingMessages(agent, [message]);
        oaiMessages.Count().Should().Be(1);
        oaiMessages.First().Should().BeOfType<ChatRequestAssistantMessage>();
        assistantMessage = (ChatRequestAssistantMessage)oaiMessages.First();
        assistantMessage.Content.Should().BeNullOrEmpty();
        assistantMessage.ToolCalls.Count().Should().Be(2);

        // tool call result message
        message = new ToolCallResultMessage("result", "test", "test", "user");
        oaiMessages = middleware.ProcessIncomingMessages(agent, [message]);
        oaiMessages.Count().Should().Be(1);
        oaiMessages.First().Should().BeOfType<ChatRequestToolMessage>();
        var toolCallMessage = (ChatRequestToolMessage)oaiMessages.First();
        toolCallMessage.Content.Should().Be("result");

        // tool call result message with multiple tool calls
        message = new ToolCallResultMessage(
            toolCalls:
            [
                new ToolCall("result", "test", "test"),
                new ToolCall("result", "test", "test"),
            ], "user");
        oaiMessages = middleware.ProcessIncomingMessages(agent, [message]);
        oaiMessages.Count().Should().Be(2);
        oaiMessages.First().Should().BeOfType<ChatRequestToolMessage>();
        toolCallMessage = (ChatRequestToolMessage)oaiMessages.First();
        toolCallMessage.Content.Should().Be("test");
        oaiMessages.Last().Should().BeOfType<ChatRequestToolMessage>();
        toolCallMessage = (ChatRequestToolMessage)oaiMessages.Last();
        toolCallMessage.Content.Should().Be("test");

        // aggregate message test
        // an aggregate message containing a tool call and its result is returned by a GPT agent
        // when the tool call is invoked automatically inside the agent
        message = new AggregateMessage<ToolCallMessage, ToolCallResultMessage>(
            message1: new ToolCallMessage("test", "test", "assistant"),
            message2: new ToolCallResultMessage("result", "test", "test", "assistant"), "assistant");
        oaiMessages = middleware.ProcessIncomingMessages(agent, [message]);
        oaiMessages.Count().Should().Be(2);
        oaiMessages.First().Should().BeOfType<ChatRequestAssistantMessage>();
        assistantMessage = (ChatRequestAssistantMessage)oaiMessages.First();
        assistantMessage.Content.Should().BeNullOrEmpty();
        assistantMessage.ToolCalls.Count().Should().Be(1);
        oaiMessages.Last().Should().BeOfType<ChatRequestToolMessage>();
        toolCallMessage = (ChatRequestToolMessage)oaiMessages.Last();
        toolCallMessage.Content.Should().Be("result");

        // aggregate message test 2
        // if the aggregate message comes from the user, it is converted to a user message
        message = new AggregateMessage<ToolCallMessage, ToolCallResultMessage>(
            message1: new ToolCallMessage("test", "test", "user"),
            message2: new ToolCallResultMessage("result", "test", "test", "user"), "user");
        oaiMessages = middleware.ProcessIncomingMessages(agent, [message]);
        oaiMessages.Count().Should().Be(1);
        oaiMessages.First().Should().BeOfType<ChatRequestUserMessage>();
        userMessage = (ChatRequestUserMessage)oaiMessages.First();
        userMessage.Content.Should().Be("result");

        // aggregate message test 3
        // if the aggregate message comes from the user and contains multiple tool call results,
        // it is converted to multiple user messages
        message = new AggregateMessage<ToolCallMessage, ToolCallResultMessage>(
            message1: new ToolCallMessage(
                toolCalls:
                [
                    new ToolCall("test", "test"),
                    new ToolCall("test", "test"),
                ], from: "user"),
            message2: new ToolCallResultMessage(
                toolCalls:
                [
                    new ToolCall("result", "test", "test"),
                    new ToolCall("result", "test", "test"),
                ], from: "user"), "user");
        oaiMessages = middleware.ProcessIncomingMessages(agent, [message]);
        oaiMessages.Count().Should().Be(2);
        oaiMessages.First().Should().BeOfType<ChatRequestUserMessage>();
        oaiMessages.Last().Should().BeOfType<ChatRequestUserMessage>();

        // system message
        message = new TextMessage(Role.System, "You are a helpful AI assistant");
        oaiMessages = middleware.ProcessIncomingMessages(agent, [message]);
        oaiMessages.Count().Should().Be(1);
        oaiMessages.First().Should().BeOfType<ChatRequestSystemMessage>();
    }
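
    // Messages that already wrap a ChatRequestMessage in a MessageEnvelope<T> are passed through
    // unchanged instead of being converted again.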
    [Fact]
    public void ToOpenAIChatRequestMessageShortCircuitTest()
    {
        var agent = new EchoAgent("assistant");
        var middleware = new OpenAIChatRequestMessageConnector();
        ChatRequestMessage[] messages =
        [
            new ChatRequestUserMessage("Hello"),
            new ChatRequestAssistantMessage("How can I help you?"),
            new ChatRequestSystemMessage("You are a helpful AI assistant"),
            new ChatRequestFunctionMessage("result", "functionName"),
            new ChatRequestToolMessage("test", "test"),
        ];

        foreach (var oaiMessage in messages)
        {
            IMessage message = new MessageEnvelope<ChatRequestMessage>(oaiMessage);
            var oaiMessages = middleware.ProcessIncomingMessages(agent, [message]);
            oaiMessages.Count().Should().Be(1);
            oaiMessages.First().Should().Be(oaiMessage);
        }
    }
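
    // Projects each (original message, converted messages) pair into anonymous objects, serializes
    // them as indented JSON, and verifies the output with ApprovalTests.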
    private void VerifyOAIMessages(IEnumerable<(IMessage, IEnumerable<ChatRequestMessage>)> messages)
    {
        var jsonObjects = messages.Select(pair =>
        {
            var (originalMessage, ms) = pair;
            var objs = new List<object>();
            foreach (var m in ms)
            {
                object? obj = null;
                if (m is ChatRequestUserMessage userMessage)
                {
                    obj = new
                    {
                        Role = userMessage.Role.ToString(),
                        Content = userMessage.Content,
                        MultiModaItem = userMessage.MultimodalContentItems?.Select(item =>
                        {
                            return item switch
                            {
                                ChatMessageImageContentItem imageContentItem => new
                                {
                                    Type = "Image",
                                    ImageUrl = imageContentItem.ImageUrl,
                                } as object,
                                ChatMessageTextContentItem textContentItem => new
                                {
                                    Type = "Text",
                                    Text = textContentItem.Text,
                                } as object,
                                _ => throw new System.NotImplementedException(),
                            };
                        }),
                    };
                }

                if (m is ChatRequestAssistantMessage assistantMessage)
                {
                    obj = new
                    {
                        Role = assistantMessage.Role.ToString(),
                        Content = assistantMessage.Content,
                        TooCall = assistantMessage.ToolCalls.Select(tc =>
                        {
                            return tc switch
                            {
                                ChatCompletionsFunctionToolCall functionToolCall => new
                                {
                                    Type = "Function",
                                    Name = functionToolCall.Name,
                                    Arguments = functionToolCall.Arguments,
                                    Id = functionToolCall.Id,
                                } as object,
                                _ => throw new System.NotImplementedException(),
                            };
                        }),
                        FunctionCallName = assistantMessage.FunctionCall?.Name,
                        FunctionCallArguments = assistantMessage.FunctionCall?.Arguments,
                    };
                }

                if (m is ChatRequestSystemMessage systemMessage)
                {
                    obj = new
                    {
                        Role = systemMessage.Role.ToString(),
                        Content = systemMessage.Content,
                    };
                }

                if (m is ChatRequestFunctionMessage functionMessage)
                {
                    obj = new
                    {
                        Role = functionMessage.Role.ToString(),
                        Content = functionMessage.Content,
                        Name = functionMessage.Name,
                    };
                }

                if (m is ChatRequestToolMessage toolCallMessage)
                {
                    obj = new
                    {
                        Role = toolCallMessage.Role.ToString(),
                        Content = toolCallMessage.Content,
                        ToolCallId = toolCallMessage.ToolCallId,
                    };
                }

                objs.Add(obj ?? throw new System.NotImplementedException());
            }

            return new
            {
                OriginalMessage = originalMessage.ToString(),
                ConvertedMessages = objs,
            };
        });

        var json = JsonSerializer.Serialize(jsonObjects, this.jsonSerializerOptions);
        Approvals.Verify(json);
    }
}