Merge branch 'main' into users/david/ollama-tools
LittleLittleCloud authored Sep 6, 2024
2 parents 7e328d0 + 1ff477f commit cc5d24e
Showing 34 changed files with 2,331 additions and 1,064 deletions.
2 changes: 1 addition & 1 deletion autogen/agentchat/contrib/agent_builder.py
@@ -103,7 +103,7 @@ class AgentBuilder:
"""

AGENT_NAME_PROMPT = """# Your task
Suggest no more then {max_agents} experts with their name according to the following user requirement.
Suggest no more than {max_agents} experts with their name according to the following user requirement.
## User requirement
{task}
Empty file.
24 changes: 24 additions & 0 deletions autogen/agentchat/contrib/graph_rag/document.py
@@ -0,0 +1,24 @@
from dataclasses import dataclass
from enum import Enum, auto
from typing import Optional


class DocumentType(Enum):
    """
    Enum for supported document types.
    """

    TEXT = auto()
    HTML = auto()
    PDF = auto()


@dataclass
class Document:
    """
    A wrapper of an input document: its type, raw data, and path or URL.
    """

    doctype: DocumentType
    data: Optional[object] = None
    path_or_url: Optional[str] = ""
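
A minimal usage sketch (not part of the diff; the file path is illustrative): wrapping a local text file so it can later be handed to a graph query engine.

from autogen.agentchat.contrib.graph_rag.document import Document, DocumentType

# One input document per source file; data stays None because the engine loads it from path_or_url.
input_docs = [Document(doctype=DocumentType.TEXT, path_or_url="./data/the_matrix.txt")]
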
51 changes: 51 additions & 0 deletions autogen/agentchat/contrib/graph_rag/graph_query_engine.py
@@ -0,0 +1,51 @@
from dataclasses import dataclass, field
from typing import List, Optional, Protocol

from .document import Document


@dataclass
class GraphStoreQueryResult:
    """
    A wrapper of graph store query results.

    answer: human-readable answer to the question/query.
    results: intermediate results for the question/query, e.g. node entities.
    """

    answer: Optional[str] = None
    results: list = field(default_factory=list)


class GraphQueryEngine(Protocol):
    """An abstract base class that represents a graph query engine on top of an underlying graph database.

    This interface defines the basic methods for graph RAG.
    """

    def init_db(self, input_doc: List[Document] | None = None):
        """
        Initialize the graph database with the input documents or records.

        Usually this takes the following steps:
        1. connect to a graph database.
        2. extract graph nodes and edges from the input data, the graph schema, etc.
        3. build indexes, etc.

        Args:
            input_doc: a list of input documents used to build the graph in the database.

        Returns: GraphStore
        """
        pass

    def add_records(self, new_records: List) -> bool:
        """
        Add new records to the underlying database and to the graph if required.
        """
        pass

    def query(self, question: str, n_results: int = 1, **kwargs) -> GraphStoreQueryResult:
        """
        Transform a string-format question into a database query and return the result.
        """
        pass
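
Because GraphQueryEngine is a typing.Protocol, any class with matching method signatures satisfies it. The sketch below is a hypothetical, deliberately naive in-memory implementation meant only to show the expected call pattern; the class name and its keyword matching are illustrative and not part of this commit.

from typing import List

from autogen.agentchat.contrib.graph_rag.document import Document
from autogen.agentchat.contrib.graph_rag.graph_query_engine import GraphStoreQueryResult


class KeywordGraphQueryEngine:
    """Toy engine: loads document text into memory and answers by keyword lookup."""

    def init_db(self, input_doc: List[Document] | None = None):
        # Instead of building a real graph, keep the raw text of each document.
        self._records: List[str] = []
        for doc in input_doc or []:
            with open(doc.path_or_url, encoding="utf-8") as f:
                self._records.append(f.read())

    def add_records(self, new_records: List) -> bool:
        self._records.extend(new_records)
        return True

    def query(self, question: str, n_results: int = 1, **kwargs) -> GraphStoreQueryResult:
        # "Relevant" here means any record sharing a word with the question.
        words = [w.lower() for w in question.split()]
        hits = [r for r in self._records if any(w in r.lower() for w in words)]
        answer = hits[0][:200] if hits else None
        return GraphStoreQueryResult(answer=answer, results=hits[:n_results])
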
56 changes: 56 additions & 0 deletions autogen/agentchat/contrib/graph_rag/graph_rag_capability.py
@@ -0,0 +1,56 @@
from autogen.agentchat.contrib.capabilities.agent_capability import AgentCapability
from autogen.agentchat.conversable_agent import ConversableAgent

from .graph_query_engine import GraphQueryEngine


class GraphRagCapability(AgentCapability):
    """
    A graph RAG capability uses a graph query engine to give a conversable agent the graph RAG ability.

    An agent class with graph RAG capability can:
    1. create a graph in the underlying database from input documents.
    2. retrieve relevant information based on messages received by the agent.
    3. generate answers from the retrieved information and send messages back.

    For example,
        graph_query_engine = GraphQueryEngine(...)
        graph_query_engine.init_db([Document(doc1), Document(doc2), ...])

        graph_rag_agent = ConversableAgent(
            name="graph_rag_agent",
            max_consecutive_auto_reply=3,
            ...
        )
        graph_rag_capability = GraphRagCapability(graph_query_engine)
        graph_rag_capability.add_to_agent(graph_rag_agent)

        user_proxy = UserProxyAgent(
            name="user_proxy",
            code_execution_config=False,
            is_termination_msg=lambda msg: "TERMINATE" in msg["content"],
            human_input_mode="ALWAYS",
        )
        user_proxy.initiate_chat(graph_rag_agent, message="Name a few actors who've played in 'The Matrix'")

        # ChatResult(
        #     chat_id=None,
        #     chat_history=[
        #         {'content': "Name a few actors who've played in 'The Matrix'", 'role': 'graph_rag_agent'},
        #         {'content': 'A few actors who have played in The Matrix are:
        #                      - Keanu Reeves
        #                      - Laurence Fishburne
        #                      - Carrie-Anne Moss
        #                      - Hugo Weaving',
        #          'role': 'user_proxy'},
        #         ...)
    """

    def __init__(self, query_engine: GraphQueryEngine):
        """
        Initialize the graph RAG capability with a graph query engine.
        """
        ...

    def add_to_agent(self, agent: ConversableAgent): ...
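
The commit leaves __init__ and add_to_agent unimplemented ("..."). Below is a hedged sketch of one way a concrete subclass could wire the engine into an agent: registering a reply function that answers from the graph before the default LLM reply runs. The class name, the helper _reply_with_graph_rag, and the short-circuit behavior are assumptions for illustration, not part of this commit.

from typing import Any, Dict, List, Optional, Tuple, Union

from autogen.agentchat.conversable_agent import ConversableAgent


class SketchGraphRagCapability(GraphRagCapability):
    def __init__(self, query_engine: GraphQueryEngine):
        self._query_engine = query_engine

    def add_to_agent(self, agent: ConversableAgent):
        # Run the graph RAG reply before the agent's default reply functions.
        agent.register_reply([ConversableAgent, None], self._reply_with_graph_rag, position=0)

    def _reply_with_graph_rag(
        self,
        recipient: ConversableAgent,
        messages: Optional[List[Dict[str, Any]]] = None,
        sender: Optional[ConversableAgent] = None,
        config: Optional[Any] = None,
    ) -> Tuple[bool, Union[str, Dict, None]]:
        question = messages[-1].get("content", "") if messages else ""
        result = self._query_engine.query(question)
        # (True, reply) short-circuits the remaining reply functions; (False, None) would fall through.
        return True, result.answer
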
15 changes: 13 additions & 2 deletions autogen/coding/local_commandline_code_executor.py
@@ -221,7 +221,12 @@ def _setup_functions(self) -> None:
cmd = [py_executable, "-m", "pip", "install"] + required_packages
try:
result = subprocess.run(
cmd, cwd=self._work_dir, capture_output=True, text=True, timeout=float(self._timeout)
cmd,
cwd=self._work_dir,
capture_output=True,
text=True,
timeout=float(self._timeout),
encoding="utf-8",
)
except subprocess.TimeoutExpired as e:
raise ValueError("Pip install timed out") from e
@@ -303,7 +308,13 @@ def _execute_code_dont_check_setup(self, code_blocks: List[CodeBlock]) -> Comman

try:
result = subprocess.run(
cmd, cwd=self._work_dir, capture_output=True, text=True, timeout=float(self._timeout), env=env
cmd,
cwd=self._work_dir,
capture_output=True,
text=True,
timeout=float(self._timeout),
env=env,
encoding="utf-8",
)
except subprocess.TimeoutExpired:
logs_all += "\n" + TIMEOUT_MSG
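
The only functional change in both hunks is the added encoding="utf-8". A small sketch of why it matters (not from the diff): with text=True and no explicit encoding, subprocess.run decodes the child's output with the locale's default codec (for example cp1252 on Windows), which can mangle or fail on UTF-8 output from executed code blocks.

import subprocess
import sys

# Force the child to emit UTF-8 (-X utf8) and decode it explicitly, regardless of the host locale.
result = subprocess.run(
    [sys.executable, "-X", "utf8", "-c", "print('héllo ✓')"],
    capture_output=True,
    text=True,
    timeout=60,
    encoding="utf-8",
)
print(result.stdout)  # héllo ✓
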
2 changes: 1 addition & 1 deletion autogen/oai/anthropic.py
@@ -314,7 +314,7 @@ def oai_messages_to_anthropic_messages(params: Dict[str, Any]) -> list[dict[str,
last_tool_result_index = -1
for message in params["messages"]:
if message["role"] == "system":
params["system"] = message["content"]
params["system"] = params.get("system", "") + (" " if "system" in params else "") + message["content"]
else:
# New messages will be added here, manage role alternations
expected_role = "user" if len(processed_messages) % 2 == 0 else "assistant"
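
The changed line makes repeated system messages accumulate in params["system"] (separated by a single space) instead of the last one silently overwriting the others. A small sketch of the resulting behavior, reusing the exact expression from the diff (the message contents are invented):

params = {
    "messages": [
        {"role": "system", "content": "You are terse."},
        {"role": "system", "content": "Answer in French."},
        {"role": "user", "content": "Hi"},
    ]
}
for message in params["messages"]:
    if message["role"] == "system":
        # First pass: "system" is absent, so no separator is added; later passes append with a space.
        params["system"] = params.get("system", "") + (" " if "system" in params else "") + message["content"]

print(params["system"])  # You are terse. Answer in French.
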
2 changes: 1 addition & 1 deletion dotnet/eng/MetaInfo.props
@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<PropertyGroup>
<VersionPrefix>0.1.0</VersionPrefix>
<VersionPrefix>0.2.0</VersionPrefix>
<Authors>AutoGen</Authors>
<PackageProjectUrl>https://microsoft.github.io/autogen-for-net/</PackageProjectUrl>
<RepositoryUrl>https://github.com/microsoft/autogen</RepositoryUrl>
77 changes: 40 additions & 37 deletions dotnet/sample/AutoGen.OpenAI.Sample/Structural_Output.cs
@@ -9,11 +9,10 @@
using Json.Schema;
using Json.Schema.Generation;
using OpenAI;
using OpenAI.Chat;

namespace AutoGen.OpenAI.Sample;

internal class Structural_Output
public class Structural_Output
{
public static async Task RunAsync()
{
@@ -23,24 +22,25 @@ public static async Task RunAsync()

var schemaBuilder = new JsonSchemaBuilder().FromType<Person>();
var schema = schemaBuilder.Build();

var personSchemaFormat = ChatResponseFormat.CreateJsonSchemaFormat(
name: "Person",
jsonSchema: BinaryData.FromObjectAsJson(schema),
description: "Person schema");

var openAIClient = new OpenAIClient(apiKey);
var openAIClientAgent = new OpenAIChatAgent(
chatClient: openAIClient.GetChatClient(model),
name: "assistant",
systemMessage: "You are a helpful assistant",
responseFormat: personSchemaFormat) // structural output by passing schema to response format
systemMessage: "You are a helpful assistant")
.RegisterMessageConnector()
.RegisterPrintMessage();
#endregion create_agent

#region chat_with_agent
var reply = await openAIClientAgent.SendAsync("My name is John, I am 25 years old, and I live in Seattle. I like to play soccer and read books.");
var prompt = new TextMessage(Role.User, """
My name is John, I am 25 years old, and I live in Seattle. I like to play soccer and read books.
""");
var reply = await openAIClientAgent.GenerateReplyAsync(
messages: [prompt],
options: new GenerateReplyOptions
{
OutputSchema = schema,
});

var person = JsonSerializer.Deserialize<Person>(reply.GetContent());
Console.WriteLine($"Name: {person.Name}");
@@ -60,31 +60,34 @@ public static async Task RunAsync()
person.City.Should().Be("Seattle");
person.Hobbies.Count.Should().Be(2);
}
}

#region person_class
public class Person
{
[JsonPropertyName("name")]
[Description("Name of the person")]
[Required]
public string Name { get; set; }

[JsonPropertyName("age")]
[Description("Age of the person")]
[Required]
public int Age { get; set; }

[JsonPropertyName("city")]
[Description("City of the person")]
public string? City { get; set; }

[JsonPropertyName("address")]
[Description("Address of the person")]
public string? Address { get; set; }

[JsonPropertyName("hobbies")]
[Description("Hobbies of the person")]
public List<string>? Hobbies { get; set; }

#region person_class
[Title("Person")]
public class Person
{
[JsonPropertyName("name")]
[Description("Name of the person")]
[Required]
public string Name { get; set; }

[JsonPropertyName("age")]
[Description("Age of the person")]
[Required]
public int Age { get; set; }

[JsonPropertyName("city")]
[Description("City of the person")]
public string? City { get; set; }

[JsonPropertyName("address")]
[Description("Address of the person")]
public string? Address { get; set; }

[JsonPropertyName("hobbies")]
[Description("Hobbies of the person")]
public List<string>? Hobbies { get; set; }
}
#endregion person_class

}
#endregion person_class
27 changes: 14 additions & 13 deletions dotnet/sample/AutoGen.OpenAI.Sample/Use_Json_Mode.cs
@@ -4,13 +4,12 @@
using System.Text.Json;
using System.Text.Json.Serialization;
using AutoGen.Core;
using AutoGen.OpenAI;
using AutoGen.OpenAI.Extension;
using FluentAssertions;
using OpenAI;
using OpenAI.Chat;

namespace AutoGen.BasicSample;
namespace AutoGen.OpenAI.Sample;

public class Use_Json_Mode
{
@@ -50,18 +49,20 @@ public static async Task RunAsync()
person.Age.Should().Be(25);
person.Address.Should().BeNullOrEmpty();
}
}

#region person_class
public class Person
{
[JsonPropertyName("name")]
public string Name { get; set; }

[JsonPropertyName("age")]
public int Age { get; set; }
#region person_class
public class Person
{
[JsonPropertyName("name")]
public string Name { get; set; }

[JsonPropertyName("age")]
public int Age { get; set; }

[JsonPropertyName("address")]
public string Address { get; set; }
[JsonPropertyName("address")]
public string Address { get; set; }
}
#endregion person_class
}
#endregion person_class

7 changes: 7 additions & 0 deletions dotnet/src/AutoGen.Core/Agent/IAgent.cs
@@ -5,6 +5,7 @@
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Json.Schema;

namespace AutoGen.Core;

@@ -42,6 +43,7 @@ public GenerateReplyOptions(GenerateReplyOptions other)
this.MaxToken = other.MaxToken;
this.StopSequence = other.StopSequence?.Select(s => s)?.ToArray();
this.Functions = other.Functions?.Select(f => f)?.ToArray();
this.OutputSchema = other.OutputSchema;
}

public float? Temperature { get; set; }
@@ -51,4 +53,9 @@ public GenerateReplyOptions(GenerateReplyOptions other)
public string[]? StopSequence { get; set; }

public FunctionContract[]? Functions { get; set; }

/// <summary>
/// Structural schema for the output. This property only applies to certain LLMs.
/// </summary>
public JsonSchema? OutputSchema { get; set; }
}
@@ -335,7 +335,10 @@ private IEnumerable<ChatRequestMessage> ProcessToolCallMessage(IAgent agent, Too

var toolCall = message.ToolCalls.Select((tc, i) => new ChatCompletionsFunctionToolCall(tc.ToolCallId ?? $"{tc.FunctionName}_{i}", tc.FunctionName, tc.FunctionArguments));
var textContent = message.GetContent() ?? string.Empty;
var chatRequestMessage = new ChatRequestAssistantMessage(textContent) { Name = message.From };

// don't include the name field when it's a tool call message.
// fix https://github.com/microsoft/autogen/issues/3437
var chatRequestMessage = new ChatRequestAssistantMessage(textContent);
foreach (var tc in toolCall)
{
chatRequestMessage.ToolCalls.Add(tc);