diff --git a/dotnet/Directory.Build.props b/dotnet/Directory.Build.props
index 5641c6cac..aeb667438 100644
--- a/dotnet/Directory.Build.props
+++ b/dotnet/Directory.Build.props
@@ -9,6 +9,7 @@
enable
True
$(MSBuildThisFileDirectory)eng/opensource.snk
+ 0024000004800000940000000602000000240000525341310004000001000100f1d038d0b85ae392ad72011df91e9343b0b5df1bb8080aa21b9424362d696919e0e9ac3a8bca24e283e10f7a569c6f443e1d4e3ebc84377c87ca5caa562e80f9932bf5ea91b7862b538e13b8ba91c7565cf0e8dfeccfea9c805ae3bda044170ecc7fc6f147aeeac422dd96aeb9eb1f5a5882aa650efe2958f2f8107d2038f2ab
CS1998;CS1591
$(NoWarn);$(CSNoWarn);NU5104
true
@@ -20,4 +21,4 @@
$(MSBuildThisFileDirectory)
-
\ No newline at end of file
+
diff --git a/dotnet/src/AutoGen.LMStudio/LMStudioConfig.cs b/dotnet/src/AutoGen.LMStudio/LMStudioConfig.cs
index 4cf18210a..3864630b3 100644
--- a/dotnet/src/AutoGen.LMStudio/LMStudioConfig.cs
+++ b/dotnet/src/AutoGen.LMStudio/LMStudioConfig.cs
@@ -13,6 +13,15 @@ public class LMStudioConfig : ILLMConfig
this.Host = host;
this.Port = port;
this.Version = version;
+ this.Uri = new Uri($"http://{host}:{port}/v{version}");
+ }
+
+ public LMStudioConfig(Uri uri)
+ {
+ this.Uri = uri;
+ this.Host = uri.Host;
+ this.Port = uri.Port;
+        this.Version = int.Parse(uri.AbsolutePath.Trim('/').TrimStart('v'));
}
public string Host { get; }
@@ -21,5 +30,5 @@ public class LMStudioConfig : ILLMConfig
public int Version { get; }
- public Uri Uri => new Uri($"http://{Host}:{Port}/v{Version}");
+ public Uri Uri { get; }
}
diff --git a/dotnet/src/AutoGen/Agent/ConversableAgent.cs b/dotnet/src/AutoGen/Agent/ConversableAgent.cs
index d79d25192..fe1470502 100644
--- a/dotnet/src/AutoGen/Agent/ConversableAgent.cs
+++ b/dotnet/src/AutoGen/Agent/ConversableAgent.cs
@@ -6,6 +6,7 @@ using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
+using AutoGen.LMStudio;
using AutoGen.OpenAI;
namespace AutoGen;
@@ -74,15 +75,25 @@ public class ConversableAgent : IAgent
this.functions = llmConfig?.FunctionContracts;
}
+    /// <summary>
+    /// For test purpose only.
+    /// </summary>
+ internal IAgent? InnerAgent => this.innerAgent;
+
private IAgent? CreateInnerAgentFromConfigList(ConversableAgentConfig config)
{
IAgent? agent = null;
foreach (var llmConfig in config.ConfigList ?? Enumerable.Empty())
{
- var nextAgent = llmConfig switch
+ IAgent nextAgent = llmConfig switch
{
AzureOpenAIConfig azureConfig => new GPTAgent(this.Name!, this.systemMessage, azureConfig, temperature: config.Temperature ?? 0),
OpenAIConfig openAIConfig => new GPTAgent(this.Name!, this.systemMessage, openAIConfig, temperature: config.Temperature ?? 0),
+ LMStudioConfig lmStudioConfig => new LMStudioAgent(
+ name: this.Name,
+ config: lmStudioConfig,
+ systemMessage: this.systemMessage,
+ temperature: config.Temperature ?? 0),
_ => throw new ArgumentException($"Unsupported config type {llmConfig.GetType()}"),
};
diff --git a/dotnet/src/AutoGen/AutoGen.csproj b/dotnet/src/AutoGen/AutoGen.csproj
index 8f4bbccb5..7e14ad4e7 100644
--- a/dotnet/src/AutoGen/AutoGen.csproj
+++ b/dotnet/src/AutoGen/AutoGen.csproj
@@ -26,5 +26,9 @@
+
+
+
+
diff --git a/dotnet/test/AutoGen.Mistral.Tests/MistralClientAgentTests.cs b/dotnet/test/AutoGen.Mistral.Tests/MistralClientAgentTests.cs
index 5a9d1f95c..bcd5f1309 100644
--- a/dotnet/test/AutoGen.Mistral.Tests/MistralClientAgentTests.cs
+++ b/dotnet/test/AutoGen.Mistral.Tests/MistralClientAgentTests.cs
@@ -37,7 +37,7 @@ public partial class MistralClientAgentTests
model: "open-mistral-7b")
.RegisterMessageConnector();
var singleAgentTest = new SingleAgentTest(_output);
- await singleAgentTest.UpperCaseTest(agent);
+ await singleAgentTest.UpperCaseTestAsync(agent);
await singleAgentTest.UpperCaseStreamingTestAsync(agent);
}
diff --git a/dotnet/test/AutoGen.Tests/SingleAgentTest.cs b/dotnet/test/AutoGen.Tests/SingleAgentTest.cs
index ae566889b..79d2b9c2f 100644
--- a/dotnet/test/AutoGen.Tests/SingleAgentTest.cs
+++ b/dotnet/test/AutoGen.Tests/SingleAgentTest.cs
@@ -6,6 +6,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
+using AutoGen.LMStudio;
using AutoGen.OpenAI;
using Azure.AI.OpenAI;
using FluentAssertions;
@@ -42,7 +43,7 @@ namespace AutoGen.Tests
var agent = new GPTAgent("gpt", "You are a helpful AI assistant", config);
- await UpperCaseTest(agent);
+ await UpperCaseTestAsync(agent);
await UpperCaseStreamingTestAsync(agent);
}
@@ -117,7 +118,7 @@ namespace AutoGen.Tests
var agentWithFunction = new GPTAgent("gpt", "You are a helpful AI assistant", config, 0, functions: new[] { this.EchoAsyncFunction });
await EchoFunctionCallTestAsync(agentWithFunction);
- await UpperCaseTest(agentWithFunction);
+ await UpperCaseTestAsync(agentWithFunction);
}
[ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT")]
@@ -143,7 +144,43 @@ namespace AutoGen.Tests
llmConfig: llmConfig);
await EchoFunctionCallTestAsync(assistantAgent);
- await UpperCaseTest(assistantAgent);
+ await UpperCaseTestAsync(assistantAgent);
+ }
+
+ [Fact]
+ public async Task ItCreateAssistantAgentFromLMStudioConfigAsync()
+ {
+        var host = "localhost";
+ var port = 8080;
+ var lmStudioConfig = new LMStudioConfig(host, port);
+
+ var assistantAgent = new AssistantAgent(
+ name: "assistant",
+ llmConfig: new ConversableAgentConfig()
+ {
+ ConfigList = [lmStudioConfig],
+ });
+
+ assistantAgent.Name.Should().Be("assistant");
+        assistantAgent.InnerAgent.Should().BeOfType<LMStudioAgent>();
+ }
+
+ [ApiKeyFact("LMStudio_ENDPOINT")]
+ public async Task ItTestAssistantAgentFromLMStudioConfigAsync()
+ {
+ var Uri = Environment.GetEnvironmentVariable("LMStudio_ENDPOINT") ?? throw new ArgumentException("LMStudio_ENDPOINT is not set");
+ var lmStudioConfig = new LMStudioConfig(new Uri(Uri));
+
+ var assistantAgent = new AssistantAgent(
+ name: "assistant",
+ llmConfig: new ConversableAgentConfig()
+ {
+ ConfigList = [lmStudioConfig],
+ });
+
+ assistantAgent.Name.Should().Be("assistant");
+        assistantAgent.InnerAgent.Should().BeOfType<LMStudioAgent>();
+ await this.UpperCaseTestAsync(assistantAgent);
}
@@ -186,7 +223,6 @@ namespace AutoGen.Tests
});
await EchoFunctionCallExecutionTestAsync(assistantAgent);
- await UpperCaseTest(assistantAgent);
}
[ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT")]
@@ -206,7 +242,7 @@ namespace AutoGen.Tests
await EchoFunctionCallExecutionStreamingTestAsync(agent);
await EchoFunctionCallExecutionTestAsync(agent);
- await UpperCaseTest(agent);
+ await UpperCaseTestAsync(agent);
}
///
@@ -283,7 +319,7 @@ namespace AutoGen.Tests
}
}
- public async Task UpperCaseTest(IAgent agent)
+ public async Task UpperCaseTestAsync(IAgent agent)
{
var message = new TextMessage(Role.System, "You are a helpful AI assistant that convert user message to upper case");
var uppCaseMessage = new TextMessage(Role.User, "abcdefg");
diff --git a/dotnet/website/update.md b/dotnet/website/update.md
index b65ab128e..3d905c0ab 100644
--- a/dotnet/website/update.md
+++ b/dotnet/website/update.md
@@ -2,6 +2,7 @@
- [API Breaking Change] Update the return type of `IStreamingAgent.GenerateStreamingReplyAsync` from `Task>` to `IAsyncEnumerable`
- [API Breaking Change] Update the return type of `IStreamingMiddleware.InvokeAsync` from `Task>` to `IAsyncEnumerable`
- [API Breaking Change] Mark `RegisterReply`, `RegisterPreProcess` and `RegisterPostProcess` as obsolete. You can replace them with `RegisterMiddleware`
+- Fix [Issue 2609](https://github.com/microsoft/autogen/issues/2609)
##### Update on 0.0.12 (2024-04-22)
- Add AutoGen.Mistral package to support Mistral.AI models
##### Update on 0.0.11 (2024-04-10)