diff --git a/dotnet/AutoGen.sln b/dotnet/AutoGen.sln
index 6c4e8f0396b6..5ecfe1938873 100644
--- a/dotnet/AutoGen.sln
+++ b/dotnet/AutoGen.sln
@@ -57,8 +57,9 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Gemini", "src\AutoG
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Gemini.Tests", "test\AutoGen.Gemini.Tests\AutoGen.Gemini.Tests.csproj", "{8EA16BAB-465A-4C07-ABC4-1070D40067E9}"
EndProject
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AutoGen.Gemini.Sample", "sample\AutoGen.Gemini.Sample\AutoGen.Gemini.Sample.csproj", "{19679B75-CE3A-4DF0-A3F0-CA369D2760A4}"
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AutoGen.AotCompatibility.Tests", "test\AutoGen.AotCompatibility.Tests\AutoGen.AotCompatibility.Tests.csproj", "{6B82F26D-5040-4453-B21B-C8D1F913CE4C}"
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Gemini.Sample", "sample\AutoGen.Gemini.Sample\AutoGen.Gemini.Sample.csproj", "{19679B75-CE3A-4DF0-A3F0-CA369D2760A4}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.AotCompatibility.Tests", "test\AutoGen.AotCompatibility.Tests\AutoGen.AotCompatibility.Tests.csproj", "{6B82F26D-5040-4453-B21B-C8D1F913CE4C}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
diff --git a/dotnet/sample/AutoGen.BasicSamples/AutoGen.BasicSample.csproj b/dotnet/sample/AutoGen.BasicSamples/AutoGen.BasicSample.csproj
index afc761649062..6f55a04592f5 100644
--- a/dotnet/sample/AutoGen.BasicSamples/AutoGen.BasicSample.csproj
+++ b/dotnet/sample/AutoGen.BasicSamples/AutoGen.BasicSample.csproj
@@ -6,6 +6,7 @@
<ImplicitUsings>enable</ImplicitUsings>
<GenerateDocumentationFile>True</GenerateDocumentationFile>
<NoWarn>$(NoWarn);CS8981;CS8600;CS8602;CS8604;CS8618;CS0219;SKEXP0054;SKEXP0050;SKEXP0110</NoWarn>
+ true
@@ -15,10 +16,4 @@
-
- <ItemGroup>
- <None Include="ImageResources\square.png">
- <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
- </None>
- </ItemGroup>
diff --git a/dotnet/sample/AutoGen.BasicSamples/Example05_Dalle_And_GPT4V.cs b/dotnet/sample/AutoGen.BasicSamples/Example05_Dalle_And_GPT4V.cs
index 67fd40ea3ac4..ba7b5d4bde44 100644
--- a/dotnet/sample/AutoGen.BasicSamples/Example05_Dalle_And_GPT4V.cs
+++ b/dotnet/sample/AutoGen.BasicSamples/Example05_Dalle_And_GPT4V.cs
@@ -61,7 +61,7 @@ public static async Task RunAsync()
var gpt4vConfig = autogen.GetOpenAIConfigList(openAIKey, new[] { "gpt-4-vision-preview" });
var openAIClient = new OpenAIClient(openAIKey);
var instance = new Example05_Dalle_And_GPT4V(openAIClient);
- var imagePath = Path.Combine(Environment.CurrentDirectory, "image.jpg");
+ var imagePath = Path.Combine("resource", "images", "background.png");
if (File.Exists(imagePath))
{
File.Delete(imagePath);
diff --git a/dotnet/sample/AutoGen.BasicSamples/Example15_GPT4V_BinaryDataImageMessage.cs b/dotnet/sample/AutoGen.BasicSamples/Example15_GPT4V_BinaryDataImageMessage.cs
index f376342ed85e..788122d3f383 100644
--- a/dotnet/sample/AutoGen.BasicSamples/Example15_GPT4V_BinaryDataImageMessage.cs
+++ b/dotnet/sample/AutoGen.BasicSamples/Example15_GPT4V_BinaryDataImageMessage.cs
@@ -14,7 +14,7 @@ namespace AutoGen.BasicSample;
/// </summary>
public static class Example15_GPT4V_BinaryDataImageMessage
{
- private static readonly string ImageResourcePath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "ImageResources");
+ private static readonly string ImageResourcePath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "resource", "images");
private static Dictionary<string, string> _mediaTypeMappings = new()
{
@@ -28,13 +28,14 @@ public static class Example15_GPT4V_BinaryDataImageMessage
public static async Task RunAsync()
{
var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable.");
- var openAiConfig = new OpenAIConfig(openAIKey, "gpt-4-vision-preview");
+ var openAiConfig = new OpenAIConfig(openAIKey, "gpt-4o");
var visionAgent = new GPTAgent(
name: "gpt",
systemMessage: "You are a helpful AI assistant",
config: openAiConfig,
- temperature: 0);
+ temperature: 0)
+ .RegisterPrintMessage();
List<IMessage> messages =
[new TextMessage(Role.User, "What is this image?", from: "user")];
diff --git a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Agent_Middleware.cs b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Agent_Middleware.cs
new file mode 100644
index 000000000000..57f8ab4075c2
--- /dev/null
+++ b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Agent_Middleware.cs
@@ -0,0 +1,80 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Agent_Middleware.cs
+
+#region Using
+using AutoGen.Core;
+using AutoGen.OpenAI;
+using AutoGen.OpenAI.Extension;
+using Azure.AI.OpenAI;
+#endregion Using
+using FluentAssertions;
+
+namespace AutoGen.BasicSample;
+
+public class Agent_Middleware
+{
+ public static async Task RunTokenCountAsync()
+ {
+ #region Create_Agent
+ var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new InvalidOperationException("Please set the environment variable OPENAI_API_KEY");
+ var model = "gpt-3.5-turbo";
+ var openaiClient = new OpenAIClient(apiKey);
+ var openaiMessageConnector = new OpenAIChatRequestMessageConnector();
+ var totalTokenCount = 0;
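+ // The first middleware below sees the raw MessageEnvelope<ChatCompletions> reply from the inner agent
+ // and accumulates its token usage before the message connector converts it to an AutoGen message.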
+ var agent = new OpenAIChatAgent(
+ openAIClient: openaiClient,
+ name: "agent",
+ modelName: model,
+ systemMessage: "You are a helpful AI assistant")
+ .RegisterMiddleware(async (messages, option, innerAgent, ct) =>
+ {
+ var reply = await innerAgent.GenerateReplyAsync(messages, option, ct);
+ if (reply is MessageEnvelope<ChatCompletions> chatCompletions)
+ {
+ var tokenCount = chatCompletions.Content.Usage.TotalTokens;
+ totalTokenCount += tokenCount;
+ }
+ return reply;
+ })
+ .RegisterMiddleware(openaiMessageConnector);
+ #endregion Create_Agent
+
+ #region Chat_With_Agent
+ var reply = await agent.SendAsync("Tell me a joke");
+ Console.WriteLine($"Total token count: {totalTokenCount}");
+ #endregion Chat_With_Agent
+
+ #region verify_reply
+ reply.Should().BeOfType<TextMessage>();
+ totalTokenCount.Should().BeGreaterThan(0);
+ #endregion verify_reply
+ }
+
+ public static async Task RunRagTaskAsync()
+ {
+ #region Create_Agent
+ var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new InvalidOperationException("Please set the environment variable OPENAI_API_KEY");
+ var model = "gpt-3.5-turbo";
+ var openaiClient = new OpenAIClient(apiKey);
+ var openaiMessageConnector = new OpenAIChatRequestMessageConnector();
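+ // The middleware registered below prepends a system message with today's date before delegating
+ // to the inner agent, so the model can answer date-related questions.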
+ var agent = new OpenAIChatAgent(
+ openAIClient: openaiClient,
+ name: "agent",
+ modelName: model,
+ systemMessage: "You are a helpful AI assistant")
+ .RegisterMessageConnector()
+ .RegisterMiddleware(async (messages, option, innerAgent, ct) =>
+ {
+ var today = DateTime.UtcNow;
+ var todayMessage = new TextMessage(Role.System, $"Today is {today:yyyy-MM-dd}");
+ messages = messages.Concat(new[] { todayMessage });
+ return await innerAgent.GenerateReplyAsync(messages, option, ct);
+ })
+ .RegisterPrintMessage();
+ #endregion Create_Agent
+
+ #region Chat_With_Agent
+ var reply = await agent.SendAsync("what's the date today");
+ #endregion Chat_With_Agent
+ }
+}
diff --git a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Chat_With_Agent.cs b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Chat_With_Agent.cs
new file mode 100644
index 000000000000..0ac1cda75288
--- /dev/null
+++ b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Chat_With_Agent.cs
@@ -0,0 +1,59 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Chat_With_Agent.cs
+
+#region Using
+using AutoGen.Core;
+using AutoGen.OpenAI;
+using AutoGen.OpenAI.Extension;
+using Azure.AI.OpenAI;
+#endregion Using
+
+using FluentAssertions;
+
+namespace AutoGen.BasicSample;
+
+public class Chat_With_Agent
+{
+ public static async Task RunAsync()
+ {
+ #region Create_Agent
+ var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable.");
+ var model = "gpt-3.5-turbo";
+ var openaiClient = new OpenAIClient(apiKey);
+ var agent = new OpenAIChatAgent(
+ openAIClient: openaiClient,
+ name: "agent",
+ modelName: model,
+ systemMessage: "You are a helpful AI assistant")
+ .RegisterMessageConnector(); // convert OpenAI message to AutoGen message
+ #endregion Create_Agent
+
+ #region Chat_With_Agent
+ var reply = await agent.SendAsync("Tell me a joke");
+ reply.Should().BeOfType<TextMessage>();
+ if (reply is TextMessage textMessage)
+ {
+ Console.WriteLine(textMessage.Content);
+ }
+ #endregion Chat_With_Agent
+
+ #region Chat_With_History
+ reply = await agent.SendAsync("summarize the conversation", chatHistory: [reply]);
+ #endregion Chat_With_History
+
+ #region Streaming_Chat
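+ // Stream the reply as it is generated; each TextMessageUpdate carries an incremental chunk of the final text message.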
+ var question = new TextMessage(Role.User, "Tell me a long joke");
+ await foreach (var streamingReply in agent.GenerateStreamingReplyAsync([question]))
+ {
+ if (streamingReply is TextMessageUpdate textMessageUpdate)
+ {
+ Console.WriteLine(textMessageUpdate.Content);
+ }
+ }
+ #endregion Streaming_Chat
+
+ #region verify_reply
+ reply.Should().BeOfType<TextMessage>();
+ #endregion verify_reply
+ }
+}
diff --git a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Dynamic_Group_Chat.cs b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Dynamic_Group_Chat.cs
new file mode 100644
index 000000000000..9d21bbde7d30
--- /dev/null
+++ b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Dynamic_Group_Chat.cs
@@ -0,0 +1,91 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Dynamic_GroupChat.cs
+
+using AutoGen.Core;
+using AutoGen.OpenAI;
+using AutoGen.OpenAI.Extension;
+using AutoGen.SemanticKernel;
+using AutoGen.SemanticKernel.Extension;
+using Azure.AI.OpenAI;
+using Microsoft.SemanticKernel;
+
+namespace AutoGen.BasicSample;
+
+public class Dynamic_Group_Chat
+{
+ public static async Task RunAsync()
+ {
+ var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable.");
+ var model = "gpt-3.5-turbo";
+
+ #region Create_Coder
+ var openaiClient = new OpenAIClient(apiKey);
+ var coder = new OpenAIChatAgent(
+ openAIClient: openaiClient,
+ name: "coder",
+ modelName: model,
+ systemMessage: "You are a C# coder, when writing csharp code, please put the code between ```csharp and ```")
+ .RegisterMessageConnector() // convert OpenAI message to AutoGen message
+ .RegisterPrintMessage(); // print the message content
+ #endregion Create_Coder
+
+ #region Create_Commenter
+ var kernel = Kernel
+ .CreateBuilder()
+ .AddOpenAIChatCompletion(modelId: model, apiKey: apiKey)
+ .Build();
+ var commenter = new SemanticKernelAgent(
+ kernel: kernel,
+ name: "commenter",
+ systemMessage: "You write inline comments for the code snippet and add unit tests if necessary")
+ .RegisterMessageConnector() // register message connector so it support AutoGen built-in message types like TextMessage.
+ .RegisterPrintMessage(); // pretty print the message to the console
+ #endregion Create_Commenter
+
+ #region Create_UserProxy
+ var userProxy = new DefaultReplyAgent("user", defaultReply: "END")
+ .RegisterPrintMessage(); // print the message content
+ #endregion Create_UserProxy
+
+ #region Create_Group
+ var admin = new OpenAIChatAgent(
+ openAIClient: openaiClient,
+ name: "admin",
+ modelName: model)
+ .RegisterMessageConnector(); // convert OpenAI message to AutoGen message
+
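+ // The admin agent is used by the group chat to decide which member speaks next,
+ // guided by the workflow instruction placed in the chat history below.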
+ var group = new GroupChat(
+ members: [coder, commenter, userProxy],
+ admin: admin);
+ #endregion Create_Group
+
+ #region Chat_With_Group
+ var workflowInstruction = new TextMessage(
+ Role.User,
+ """
+ Here is the workflow of this group chat:
+ User{Ask a question} -> Coder{Write code}
+ Coder{Write code} -> Commenter{Add comments to the code}
+ Commenter{Add comments to the code} -> User{END}
+ """);
+
+ var question = new TextMessage(Role.User, "How to calculate the 100th Fibonacci number?");
+ var chatHistory = new List<IMessage> { workflowInstruction, question };
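+ // Run the group chat one round at a time, appending each reply to the history,
+ // until the user proxy answers with its default "END" reply.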
+ while (true)
+ {
+ var replies = await group.CallAsync(chatHistory, maxRound: 1);
+ var lastReply = replies.Last();
+ chatHistory.Add(lastReply);
+
+ if (lastReply.From == userProxy.Name)
+ {
+ break;
+ }
+ }
+ #endregion Chat_With_Group
+
+ #region Summarize_Chat_History
+ var summary = await coder.SendAsync("summarize the conversation", chatHistory: chatHistory);
+ #endregion Summarize_Chat_History
+ }
+}
diff --git a/dotnet/sample/AutoGen.BasicSamples/GettingStart/FSM_Group_Chat.cs b/dotnet/sample/AutoGen.BasicSamples/GettingStart/FSM_Group_Chat.cs
new file mode 100644
index 000000000000..59c0aa9ca88b
--- /dev/null
+++ b/dotnet/sample/AutoGen.BasicSamples/GettingStart/FSM_Group_Chat.cs
@@ -0,0 +1,201 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// FSM_Group_Chat.cs
+
+using System.Text;
+#region Using
+using AutoGen.Core;
+using AutoGen.OpenAI;
+using AutoGen.OpenAI.Extension;
+using Azure.AI.OpenAI;
+#endregion Using
+
+namespace AutoGen.BasicSample;
+
+#region FillFormTool
+public partial class FillFormTool
+{
+ private string? name = null;
+ private string? email = null;
+ private string? phone = null;
+ private string? address = null;
+ private bool? receiveUpdates = null;
+
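+ /// <summary>
+ /// Save the application progress. Newly provided values overwrite the stored ones;
+ /// the reply lists any fields that are still missing.
+ /// </summary>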
+ [Function]
+ public async Task<string> SaveProgress(
+ string name,
+ string email,
+ string phone,
+ string address,
+ bool? receiveUpdates)
+ {
+ this.name = !string.IsNullOrEmpty(name) ? name : this.name;
+ this.email = !string.IsNullOrEmpty(email) ? email : this.email;
+ this.phone = !string.IsNullOrEmpty(phone) ? phone : this.phone;
+ this.address = !string.IsNullOrEmpty(address) ? address : this.address;
+ this.receiveUpdates = receiveUpdates ?? this.receiveUpdates;
+
+ var missingInformationStringBuilder = new StringBuilder();
+ if (string.IsNullOrEmpty(this.name))
+ {
+ missingInformationStringBuilder.AppendLine("Name is missing.");
+ }
+
+ if (string.IsNullOrEmpty(this.email))
+ {
+ missingInformationStringBuilder.AppendLine("Email is missing.");
+ }
+
+ if (string.IsNullOrEmpty(this.phone))
+ {
+ missingInformationStringBuilder.AppendLine("Phone is missing.");
+ }
+
+ if (string.IsNullOrEmpty(this.address))
+ {
+ missingInformationStringBuilder.AppendLine("Address is missing.");
+ }
+
+ if (this.receiveUpdates == null)
+ {
+ missingInformationStringBuilder.AppendLine("ReceiveUpdates is missing.");
+ }
+
+ if (missingInformationStringBuilder.Length > 0)
+ {
+ return missingInformationStringBuilder.ToString();
+ }
+ else
+ {
+ return "Application information is saved to database.";
+ }
+ }
+}
+#endregion FillFormTool
+
+public class FSM_Group_Chat
+{
+ public static async Task<IAgent> CreateSaveProgressAgent(OpenAIClient client, string model)
+ {
+ #region Create_Save_Progress_Agent
+ var tool = new FillFormTool();
+ var functionCallMiddleware = new FunctionCallMiddleware(
+ functions: [tool.SaveProgressFunctionContract],
+ functionMap: new Dictionary<string, Func<string, Task<string>>>
+ {
+ { tool.SaveProgressFunctionContract.Name!, tool.SaveProgressWrapper },
+ });
+
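+ // The agent below pairs the function-call middleware with a middleware that forwards only the most
+ // recent user input, so each turn saves progress from the latest information the user provided.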
+ var chatAgent = new OpenAIChatAgent(
+ openAIClient: client,
+ name: "application",
+ modelName: model,
+ systemMessage: """You are a helpful application form assistant who saves progress while user fills application.""")
+ .RegisterMessageConnector()
+ .RegisterMiddleware(functionCallMiddleware)
+ .RegisterMiddleware(async (msgs, option, agent, ct) =>
+ {
+ var lastUserMessage = msgs.Last() ?? throw new Exception("No user message found.");
+ var prompt = $"""
+ Save progress according to the most recent information provided by user.
+
+ ```user
+ {lastUserMessage.GetContent()}
+ ```
+ """;
+
+ return await agent.GenerateReplyAsync([new TextMessage(Role.User, prompt)], option, ct);
+
+ });
+ #endregion Create_Save_Progress_Agent
+
+ return chatAgent;
+ }
+
+ public static async Task<IAgent> CreateAssistantAgent(OpenAIClient openaiClient, string model)
+ {
+ #region Create_Assistant_Agent
+ var chatAgent = new OpenAIChatAgent(
+ openAIClient: openaiClient,
+ name: "assistant",
+ modelName: model,
+ systemMessage: """You create polite prompt to ask user provide missing information""")
+ .RegisterMessageConnector()
+ .RegisterPrintMessage()
+ .RegisterMiddleware(async (msgs, option, agent, ct) =>
+ {
+ var lastReply = msgs.Last() ?? throw new Exception("No reply found.");
+ var reply = await agent.GenerateReplyAsync(msgs, option, ct);
+
+ // if application is complete, exit conversation by sending termination message
+ if (lastReply.GetContent()?.Contains("Application information is saved to database.") is true)
+ {
+ return new TextMessage(Role.Assistant, GroupChatExtension.TERMINATE, from: agent.Name);
+ }
+ else
+ {
+ return reply;
+ }
+ });
+ #endregion Create_Assistant_Agent
+ return chatAgent;
+ }
+
+ public static async Task<IAgent> CreateUserAgent(OpenAIClient openaiClient, string model)
+ {
+ #region Create_User_Agent
+ var chatAgent = new OpenAIChatAgent(
+ openAIClient: openaiClient,
+ name: "user",
+ modelName: model,
+ systemMessage: """
+ You are a user who is filling an application form. Simply provide the information as requested and answer the questions, don't do anything else.
+
+ here's some personal information about you:
+ - name: John Doe
+ - email: 1234567@gmail.com
+ - phone: 123-456-7890
+ - address: 1234 Main St, Redmond, WA 98052
+ - want to receive update? true
+ """)
+ .RegisterMessageConnector()
+ .RegisterPrintMessage();
+ #endregion Create_User_Agent
+ return chatAgent;
+ }
+
+ public static async Task RunAsync()
+ {
+ var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable.");
+ var model = "gpt-3.5-turbo";
+ var openaiClient = new OpenAIClient(apiKey);
+ var applicationAgent = await CreateSaveProgressAgent(openaiClient, model);
+ var assistantAgent = await CreateAssistantAgent(openaiClient, model);
+ var userAgent = await CreateUserAgent(openaiClient, model);
+
+ #region Create_Graph
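+ // Define the allowed speaker transitions: user -> application -> assistant -> user.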
+ var userToApplicationTransition = Transition.Create(userAgent, applicationAgent);
+ var applicationToAssistantTransition = Transition.Create(applicationAgent, assistantAgent);
+ var assistantToUserTransition = Transition.Create(assistantAgent, userAgent);
+
+ var workflow = new Graph(
+ [
+ userToApplicationTransition,
+ applicationToAssistantTransition,
+ assistantToUserTransition,
+ ]);
+ #endregion Create_Graph
+
+ #region Group_Chat
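+ // The workflow graph constrains speaker selection so the conversation cycles through the transitions defined above.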
+ var groupChat = new GroupChat(
+ members: [userAgent, applicationAgent, assistantAgent],
+ workflow: workflow);
+ #endregion Group_Chat
+
+ var initialMessage = await assistantAgent.SendAsync("Generate a greeting message for the user and start the conversation by asking what's their name.");
+
+ var chatHistory = await userAgent.SendMessageToGroupAsync(groupChat, [initialMessage], maxRound: 30);
+
+ var lastMessage = chatHistory.Last();
+ Console.WriteLine(lastMessage.GetContent());
+ }
+}
diff --git a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Image_Chat_With_Agent.cs b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Image_Chat_With_Agent.cs
new file mode 100644
index 000000000000..3352f90d9211
--- /dev/null
+++ b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Image_Chat_With_Agent.cs
@@ -0,0 +1,50 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Image_Chat_With_Agent.cs
+
+using AutoGen.Core;
+using AutoGen.OpenAI;
+using AutoGen.OpenAI.Extension;
+using Azure.AI.OpenAI;
+using FluentAssertions;
+
+namespace AutoGen.BasicSample;
+
+public class Image_Chat_With_Agent
+{
+ public static async Task RunAsync()
+ {
+ #region Create_Agent
+ var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable.");
+ var model = "gpt-4o"; // The model needs to support multimodal inputs
+ var openaiClient = new OpenAIClient(apiKey);
+
+ var agent = new OpenAIChatAgent(
+ openAIClient: openaiClient,
+ name: "agent",
+ modelName: model,
+ systemMessage: "You are a helpful AI assistant")
+ .RegisterMessageConnector() // convert OpenAI message to AutoGen message
+ .RegisterPrintMessage();
+ #endregion Create_Agent
+
+ #region Prepare_Image_Input
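+ // Wrap the raw bytes in an ImageMessage; the MIME type is carried alongside the bytes so the connector can build the image content.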
+ var backgroundImagePath = Path.Combine("resource", "images", "background.png");
+ var imageBytes = File.ReadAllBytes(backgroundImagePath);
+ var imageMessage = new ImageMessage(Role.User, BinaryData.FromBytes(imageBytes, "image/png"));
+ #endregion Prepare_Image_Input
+
+ #region Chat_With_Agent
+ var reply = await agent.SendAsync("what's in the picture", chatHistory: [imageMessage]);
+ #endregion Chat_With_Agent
+
+ #region Prepare_Multimodal_Input
+ var textMessage = new TextMessage(Role.User, "what's in the picture");
+ var multimodalMessage = new MultiModalMessage(Role.User, [textMessage, imageMessage]);
+ reply = await agent.SendAsync(multimodalMessage);
+ #endregion Prepare_Multimodal_Input
+
+ #region verify_reply
+ reply.Should().BeOfType<TextMessage>();
+ #endregion verify_reply
+ }
+}
diff --git a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Use_Tools_With_Agent.cs b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Use_Tools_With_Agent.cs
new file mode 100644
index 000000000000..f1a230c123b1
--- /dev/null
+++ b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Use_Tools_With_Agent.cs
@@ -0,0 +1,85 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Use_Tools_With_Agent.cs
+
+#region Using
+using AutoGen.Core;
+using AutoGen.OpenAI;
+using AutoGen.OpenAI.Extension;
+using Azure.AI.OpenAI;
+#endregion Using
+using FluentAssertions;
+
+namespace AutoGen.BasicSample;
+
+public partial class Tools
+{
+ /// <summary>
+ /// Get the weather of the city.
+ /// </summary>
+ /// <param name="city"></param>
+ [Function]
+ public async Task<string> GetWeather(string city)
+ {
+ return $"The weather in {city} is sunny.";
+ }
+}
+public class Use_Tools_With_Agent
+{
+ public static async Task RunAsync()
+ {
+ #region Create_tools
+ var tools = new Tools();
+ #endregion Create_tools
+
+ #region Create_Agent
+ var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable.");
+ var model = "gpt-3.5-turbo";
+ var openaiClient = new OpenAIClient(apiKey);
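+ // FunctionCallMiddleware takes the tool's contract (the schema sent to the model) and a functionMap
+ // that lets the middleware invoke the tool locally and return the result as a ToolCallAggregateMessage.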
+ var functionCallMiddleware = new FunctionCallMiddleware(
+ functions: [tools.GetWeatherFunctionContract],
+ functionMap: new Dictionary<string, Func<string, Task<string>>>()
+ {
+ { tools.GetWeatherFunctionContract.Name!, tools.GetWeatherWrapper },
+ });
+ var agent = new OpenAIChatAgent(
+ openAIClient: openaiClient,
+ name: "agent",
+ modelName: model,
+ systemMessage: "You are a helpful AI assistant")
+ .RegisterMessageConnector() // convert OpenAI message to AutoGen message
+ .RegisterMiddleware(functionCallMiddleware) // pass function definition to agent.
+ .RegisterPrintMessage(); // print the message content
+ #endregion Create_Agent
+
+ #region Single_Turn_Tool_Call
+ var question = new TextMessage(Role.User, "What is the weather in Seattle?");
+ var toolCallReply = await agent.SendAsync(question);
+ #endregion Single_Turn_Tool_Call
+
+ #region verify_tool_call_reply
+ toolCallReply.Should().BeOfType<ToolCallAggregateMessage>();
+ #endregion verify_tool_call_reply
+
+ #region Multi_Turn_Tool_Call
+ var finalReply = await agent.SendAsync(chatHistory: [question, toolCallReply]);
+ #endregion Multi_Turn_Tool_Call
+
+ #region verify_reply
+ finalReply.Should().BeOfType<TextMessage>();
+ #endregion verify_reply
+
+ #region parallel_tool_call
+ question = new TextMessage(Role.User, "What is the weather in Seattle, New York and Vancouver");
+ toolCallReply = await agent.SendAsync(question);
+ #endregion parallel_tool_call
+
+ #region verify_parallel_tool_call_reply
+ toolCallReply.Should().BeOfType<ToolCallAggregateMessage>();
+ (toolCallReply as ToolCallAggregateMessage)!.Message1.ToolCalls.Count().Should().Be(3);
+ #endregion verify_parallel_tool_call_reply
+
+ #region Multi_Turn_Parallel_Tool_Call
+ finalReply = await agent.SendAsync(chatHistory: [question, toolCallReply]);
+ #endregion Multi_Turn_Parallel_Tool_Call
+ }
+}
diff --git a/dotnet/sample/AutoGen.BasicSamples/ImageResources/square.png b/dotnet/sample/AutoGen.BasicSamples/ImageResources/square.png
deleted file mode 100644
index afb4f4cd4df8..000000000000
--- a/dotnet/sample/AutoGen.BasicSamples/ImageResources/square.png
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:8323d0b8eceb752e14c29543b2e28bb2fc648ed9719095c31b7708867a4dc918
-size 491