// Copyright (c) Microsoft Corporation. All rights reserved.
// MathClassTest.cs

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using AutoGen.OpenAI;
using FluentAssertions;
using Xunit.Abstractions;

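// End-to-end group chat test: an Admin, a Teacher and a Student agent work through five
// preschool math questions against an Azure OpenAI deployment, with the [FunctionAttribute]
// methods below acting as the tools each agent can call.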
namespace AutoGen.Tests
{
    public partial class MathClassTest
    {
        private readonly ITestOutputHelper _output;

        public MathClassTest(ITestOutputHelper output)
        {
            _output = output;
        }

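        // The *Function, *FunctionContract and *Wrapper members referenced later in this file are not
        // defined here; they are presumably generated for this partial class by the AutoGen source
        // generator from the [FunctionAttribute] methods below. The "[MATH_QUESTION]"-style markers in
        // the returned strings are what the assertions at the end of the test search for.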
        [FunctionAttribute]
        public async Task<string> CreateMathQuestion(string question, int question_index)
        {
            return $@"// ignore this line [MATH_QUESTION]
Question #{question_index}:
{question}";
        }

        [FunctionAttribute]
        public async Task<string> AnswerQuestion(string answer)
        {
            return $@"// ignore this line [MATH_ANSWER]
The answer is {answer}, teacher please check answer";
        }

        [FunctionAttribute]
        public async Task<string> AnswerIsCorrect(string message)
        {
            return $@"// ignore this line [ANSWER_IS_CORRECT]
{message}";
        }

        [FunctionAttribute]
        public async Task<string> UpdateProgress(int correctAnswerCount)
        {
            if (correctAnswerCount >= 5)
            {
                return $@"// ignore this line [UPDATE_PROGRESS]
{GroupChatExtension.TERMINATE}";
            }
            else
            {
                return $@"// ignore this line [UPDATE_PROGRESS]
the number of resolved question is {correctAnswerCount}
teacher, please create the next math question";
            }
        }

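        // [ApiKeyFact] is expected to skip this test when AZURE_OPENAI_API_KEY or
        // AZURE_OPENAI_ENDPOINT is not set, so it only runs against a configured deployment.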
        [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT")]
        public async Task AssistantAgentMathChatTestAsync()
        {
            var teacher = await CreateTeacherAssistantAgentAsync();
            var student = await CreateStudentAssistantAgentAsync();
            var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new ArgumentException("AZURE_OPENAI_API_KEY is not set");
            var endPoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new ArgumentException("AZURE_OPENAI_ENDPOINT is not set");
            var model = "gpt-35-turbo-16k";
            var admin = new GPTAgent(
                name: "Admin",
                systemMessage: $@"You are admin. You ask teacher to create 5 math questions. You update progress after each question is answered.",
                config: new AzureOpenAIConfig(endPoint, model, key),
                functions: new[]
                {
                    this.UpdateProgressFunction,
                },
                functionMap: new Dictionary<string, Func<string, Task<string>>>
                {
                    { this.UpdateProgressFunction.Name, this.UpdateProgressWrapper },
                })
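                // Middleware around the admin agent: inspect each reply and, if it does not contain
                // the [UPDATE_PROGRESS] marker, re-prompt the admin (up to 5 attempts) before failing.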
                .RegisterMiddleware(async (messages, options, agent, ct) =>
                {
                    // check admin reply to make sure it calls UpdateProgress function
                    var maxAttempt = 5;
                    var reply = await agent.GenerateReplyAsync(messages, options, ct);
                    while (maxAttempt-- > 0)
                    {
                        if (options?.Functions is { Length: 0 })
                        {
                            return reply;
                        }

                        var formattedMessage = reply.FormatMessage();
                        this._output.WriteLine(formattedMessage);
                        if (reply.GetContent()?.Contains("[UPDATE_PROGRESS]") is true)
                        {
                            return reply;
                        }
                        else
                        {
                            await Task.Delay(1000);
                            var review = "Admin, please update progress based on conversation";
                            reply = await agent.SendAsync(review, messages, ct);
                        }
                    }

                    throw new Exception("Admin does not call UpdateProgress function");
                });

            await RunMathChatAsync(teacher, student, admin);
        }

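        // Teacher agent: an AssistantAgent that can call CreateMathQuestion and AnswerIsCorrect
        // via the function contracts and wrappers registered below.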
        private async Task<IAgent> CreateTeacherAssistantAgentAsync()
        {
            var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new ArgumentException("AZURE_OPENAI_API_KEY is not set");
            var endPoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new ArgumentException("AZURE_OPENAI_ENDPOINT is not set");
            var model = "gpt-35-turbo-16k";
            var config = new AzureOpenAIConfig(endPoint, model, key);
            var llmConfig = new ConversableAgentConfig
            {
                ConfigList = new[]
                {
                    config,
                },
                FunctionContracts = new[]
                {
                    this.CreateMathQuestionFunctionContract,
                    this.AnswerIsCorrectFunctionContract,
                },
            };

            var teacher = new AssistantAgent(
                name: "Teacher",
                systemMessage: $@"You are a preschool math teacher.
You create math question and ask student to answer it.
Then you check if the answer is correct.
If the answer is wrong, you ask student to fix it.
If the answer is correct, you create another math question.
",
                llmConfig: llmConfig,
                functionMap: new Dictionary<string, Func<string, Task<string>>>
                {
                    { this.CreateMathQuestionFunction.Name, this.CreateMathQuestionWrapper },
                    { this.AnswerIsCorrectFunction.Name, this.AnswerIsCorrectWrapper },
                });

            return teacher;
        }

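        // Student agent: it only exposes AnswerQuestion, so it can answer (and, per its system
        // message, fix) questions but cannot create or grade them.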
        private async Task<IAgent> CreateStudentAssistantAgentAsync()
        {
            var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new ArgumentException("AZURE_OPENAI_API_KEY is not set");
            var endPoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new ArgumentException("AZURE_OPENAI_ENDPOINT is not set");
            var model = "gpt-35-turbo-16k";
            var config = new AzureOpenAIConfig(endPoint, model, key);
            var llmConfig = new ConversableAgentConfig
            {
                FunctionContracts = new[]
                {
                    this.AnswerQuestionFunctionContract,
                },
                ConfigList = new[]
                {
                    config,
                },
            };
            var student = new AssistantAgent(
                name: "Student",
                systemMessage: $@"You are a student. Here's your workflow in pseudo code:
-workflow-
answer_question
if answer is wrong
fix_answer
-end-

Here are a few examples of answer_question:
-example 1-
2

Here are a few examples of fix_answer:
-example 1-
sorry, the answer should be 2, not 3
",
                llmConfig: llmConfig,
                functionMap: new Dictionary<string, Func<string, Task<string>>>
                {
                    { this.AnswerQuestionFunction.Name, this.AnswerQuestionWrapper }
                });

            return student;
        }

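        // Runs the three agents in a GroupChat driven by GroupChatManager for up to 50 rounds, then
        // asserts on the transcript: at least five [MATH_QUESTION], [MATH_ANSWER] and
        // [ANSWER_IS_CORRECT] messages, and exactly one terminate message from the admin.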
        private async Task RunMathChatAsync(IAgent teacher, IAgent student, IAgent admin)
        {
            var group = new GroupChat(
                [
                    admin,
                    teacher,
                    student,
                ],
                admin);

            admin.SendIntroduction($@"Welcome to the group chat! I'm admin", group);
            teacher.SendIntroduction($@"Hey I'm Teacher", group);
            student.SendIntroduction($@"Hey I'm Student", group);
            admin.SendIntroduction(@$"Teacher, please create pre-school math question for student and check answer.
Student, for each question, please answer it and ask teacher to check if the answer is correct.
I'll update the progress after each question is answered.
The conversation will end after 5 correct answers.
", group);

            var groupChatManager = new GroupChatManager(group);
            var chatHistory = await admin.InitiateChatAsync(groupChatManager, maxRound: 50);

            // print chat history
            foreach (var message in chatHistory)
            {
                _output.WriteLine(message.FormatMessage());
            }

            // check that the teacher asked at least five questions
            chatHistory.Where(msg => msg.From == teacher.Name && msg.GetContent()?.Contains("[MATH_QUESTION]") is true)
                .Count()
                .Should().BeGreaterThanOrEqualTo(5);

            // check that the student gave at least five answers (some answers might be wrong)
            chatHistory.Where(msg => msg.From == student.Name && msg.GetContent()?.Contains("[MATH_ANSWER]") is true)
                .Count()
                .Should().BeGreaterThanOrEqualTo(5);

            // check that the teacher confirmed at least five correct answers
            chatHistory.Where(msg => msg.From == teacher.Name && msg.GetContent()?.Contains("[ANSWER_IS_CORRECT]") is true)
                .Count()
                .Should().BeGreaterThanOrEqualTo(5);

            // check that there is exactly one terminate message from the admin
            chatHistory.Where(msg => msg.From == admin.Name && msg.IsGroupChatTerminateMessage())
                .Count()
                .Should().Be(1);
        }
    }
}