Merge branch 'main' into fix/readme-dupe

This commit is contained in:
Kosta Petan
2024-05-27 21:08:18 +02:00
committed by GitHub
4 changed files with 29 additions and 29 deletions

View File

@@ -11,6 +11,7 @@ At the moment the library resides in `src/` only, but we plan to publish them as
We have created a few examples to help you get started with the framework and to explore its capabilities.
- [GitHub Dev Team Sample](samples/gh-flow/README.md): Build an AI Developer Team using event-driven agents that help you automate the requirements engineering, planning, and coding process on GitHub.
- [Marketing Team Sample](samples/marketing/README.md): Create a marketing campaign using a content writer, graphic designer and social media manager.
## Contributing

View File

@@ -2,7 +2,6 @@
using Microsoft.AI.Agents.Orleans;
using Microsoft.AI.DevTeam.Events;
using Microsoft.AI.DevTeam.Extensions;
using System.Globalization;
namespace Microsoft.AI.DevTeam;
@@ -24,44 +23,36 @@ public class AzureGenie : Agent
throw new ArgumentNullException(nameof(item));
}
var data = item.Data ?? throw new ArgumentNullException(nameof(item));
var parentNumber = data.TryParseLong("parentNumber");
var issueNumber = data.TryParseLong("issueNumber");
var org = data["org"].ToString();
var repo = data["repo"].ToString();
var subject = $"{org}/{repo}/{issueNumber}";
switch (item.Type)
{
case nameof(GithubFlowEventType.ReadmeCreated):
await Store(org, repo, parentNumber, issueNumber, "readme", "md", "output", data["readme"].ToString());
{
var context = item.ToGithubContext();
await Store(context.Org,context.Repo, context.ParentNumber.Value, context.IssueNumber, "readme", "md", "output", item.Data["readme"]);
await PublishEvent(Consts.MainNamespace, this.GetPrimaryKeyString(), new Event
{
Type = nameof(GithubFlowEventType.ReadmeStored),
Subject = subject,
Data = new Dictionary<string, string> {
{ "org", org },
{ "repo", repo },
{ "issueNumber", $"{issueNumber}" },
{ "parentNumber", $"{parentNumber}" }
}
Subject = context.Subject,
Data = context.ToData()
});
break;
break;
}
case nameof(GithubFlowEventType.CodeCreated):
await Store(org, repo, parentNumber, issueNumber, "run", "sh", "output", data["code"].ToString());
await RunInSandbox(org, repo, parentNumber, issueNumber);
{
var context = item.ToGithubContext();
await Store(context.Org,context.Repo, context.ParentNumber.Value, context.IssueNumber, "run", "sh", "output", item.Data["code"]);
await RunInSandbox(context.Org,context.Repo, context.ParentNumber.Value, context.IssueNumber);
await PublishEvent(Consts.MainNamespace, this.GetPrimaryKeyString(), new Event
{
Type = nameof(GithubFlowEventType.SandboxRunCreated),
Subject = subject,
Data = new Dictionary<string, string> {
{ "org", org },
{ "repo", repo },
{ "issueNumber", $"{issueNumber}" },
{ "parentNumber", $"{parentNumber}" }
}
Subject = context.Subject,
Data = context.ToData()
});
break;
break;
}
default:
break;
}

View File

@@ -2,6 +2,7 @@ using Microsoft.AI.Agents.Abstractions;
using Microsoft.AI.Agents.Orleans;
using Microsoft.AI.DevTeam.Events;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using Microsoft.SemanticKernel.Memory;
using Orleans.Runtime;
@@ -65,7 +66,13 @@ public class DeveloperLead : AiAgent<DeveloperLeadState>, ILeadDevelopers
var context = new KernelArguments { ["input"] = AppendChatHistory(ask) };
var instruction = "Consider the following architectural guidelines:!waf!";
var enhancedContext = await AddKnowledge(instruction, "waf", context);
return await CallFunction(DevLeadSkills.Plan, enhancedContext);
var settings = new OpenAIPromptExecutionSettings{
ResponseFormat = "json_object",
MaxTokens = 4096,
Temperature = 0.8,
TopP = 1
};
return await CallFunction(DevLeadSkills.Plan, enhancedContext, settings);
}
catch (Exception ex)
{

View File

@@ -45,7 +45,8 @@ public abstract class AiAgent<T> : Agent, IAiAgent where T : class, new()
public virtual async Task<string> CallFunction(string template, KernelArguments arguments, OpenAIPromptExecutionSettings? settings = null)
{
var propmptSettings = settings ?? new OpenAIPromptExecutionSettings { MaxTokens = 18000, Temperature = 0.8, TopP = 1 };
// TODO: extract this to be configurable
var propmptSettings = settings ?? new OpenAIPromptExecutionSettings { MaxTokens = 4096, Temperature = 0.8, TopP = 1 };
var function = _kernel.CreateFunctionFromPrompt(template, propmptSettings);
var result = (await _kernel.InvokeAsync(function, arguments)).ToString();
AddToHistory(result, ChatUserType.Agent);