Skip to content

Commit

Permalink
updated to newer function calling
Browse files Browse the repository at this point in the history
  • Loading branch information
tylerprogramming committed May 4, 2024
1 parent 2ed7849 commit 51181dc
Show file tree
Hide file tree
Showing 2 changed files with 35 additions and 58 deletions.
2 changes: 1 addition & 1 deletion autogen_functions/autogen_function_example.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import autogen

config_list = autogen.config_list_from_json(
env_or_file="OAI_CONFIG_LIST",
env_or_file="config/OAI_CONFIG_LIST.json",
filter_dict={
"model": ["gpt-4"]
},
Expand Down
91 changes: 34 additions & 57 deletions autogen_functions/autogen_function_planner.py
Original file line number Diff line number Diff line change
@@ -1,61 +1,20 @@
import autogen
from typing import Annotated

# Load model configuration entries from a JSON config file. Only entries
# whose "model" field matches the filter are kept, so every agent below
# talks to gpt-3.5-turbo.
# NOTE: the earlier local-LLM placeholder dict (base_url pointing at
# LM Studio) was dead code — it was overwritten by this assignment — and
# has been removed.
config_list = autogen.config_list_from_json(
    env_or_file="config/OAI_CONFIG_LIST.json",
    filter_dict={
        "model": ["gpt-3.5-turbo"]
    },
)

# Shared LLM settings for the agents below.
# The legacy "functions" schema (and the duplicate "seed"/"config_list"
# keys it came with) is gone: tool registration now happens through the
# register_for_llm / register_for_execution decorators further down, so
# only the core generation settings remain here.
llm_config = {
    "temperature": 0,            # deterministic responses
    "timeout": 300,              # seconds to wait for a model reply
    "seed": 44,                  # cache seed for reproducible runs
    "config_list": config_list,  # model endpoints loaded above
}


def ask_planner(message):
    """Forward *message* to the planner agent and return its full reply.

    The planner_user proxy opens a chat with the planner; the first 10
    characters of the planner's final message are printed as a demo hook,
    and the complete message content is returned to the caller.
    """
    planner_user.initiate_chat(planner, message=message)
    reply = planner_user.last_message()["content"]
    print("About to just get the first 10 characters of the message!")
    print("Here is where you can do something with the message that we received.")
    print(reply[:10])
    return planner_user.last_message()["content"]


planner = autogen.AssistantAgent(
name="planner",
llm_config={
Expand All @@ -73,6 +32,7 @@ def ask_planner(message):
name="planner_user",
max_consecutive_auto_reply=0, # terminate without auto-reply
human_input_mode="NEVER",
code_execution_config={"work_dir": "planning", "use_docker": False},
)

# create an AssistantAgent instance named "assistant"
Expand All @@ -81,6 +41,12 @@ def ask_planner(message):
llm_config=llm_config
)

# create a second AssistantAgent instance named "assistant2"
# BUG FIX: this agent was copy-pasted with name="assistant", colliding
# with the first assistant's name; agent names should be unique so the
# two can be told apart in chat transcripts and registrations.
assistant2 = autogen.AssistantAgent(
    name="assistant2",
    llm_config=llm_config
)

# create a UserProxyAgent instance named "user_proxy"
user_proxy = autogen.UserProxyAgent(
name="user_proxy",
Expand All @@ -89,15 +55,26 @@ def ask_planner(message):
is_termination_msg=lambda x: "content" in x and x["content"] is not None and x["content"].rstrip().endswith(
"TERMINATE"),
code_execution_config={"work_dir": "planning", "use_docker": False},
function_map={"ask_planner": ask_planner},
)

# can also register functions to an agent this way
# user_proxy.register_function(
# function_map={
# "ask_planner": ask_planner
# }
# )

@user_proxy.register_for_execution()
@assistant.register_for_llm(description="Ask the planner to: 1. produce a plan for finishing a task, "
                                        "2. verify an execution result and potentially suggest a new plan. "
                                        "Include enough context (code, execution results) in the message.")
def ask_planner(message: Annotated[str, "Question for the planner; include all needed context such as "
                                        "code and execution results, since the planner cannot see this "
                                        "conversation"]) -> str:
    """Relay *message* to the planner agent and return the planner's reply.

    Registered for LLM use on `assistant` and for execution on `user_proxy`
    via the newer decorator-based function-calling API. Only the first 10
    characters of the reply are printed (demo hook); the full reply is
    returned.

    BUG FIX: the previous description ("Get the first 10 characters of the
    message") misdescribed the tool to the calling LLM — the function asks
    the planner and returns its complete response.
    """
    planner_user.initiate_chat(planner, message=message)
    # last_message() holds the planner's final response in this chat
    reply = planner_user.last_message()["content"]
    print("About to just get the first 10 characters of the message!")
    print("Here is where you can do something with the message that we received.")
    print(reply[:10])
    return reply


# @user_proxy.register_for_execution()
# @assistant2.register_for_llm(description="Do something")
# def do_something(message: Annotated[str, "The response from the LLM"]) -> str:
# return "Hello there"


# the assistant receives a message from the user, which contains the task description
user_proxy.initiate_chat(
Expand Down

0 comments on commit 51181dc

Please sign in to comment.