From 51181dc106b5f8c5fd1d232bcaf28edab55c129c Mon Sep 17 00:00:00 2001
From: tylerreed
Date: Fri, 3 May 2024 20:43:55 -0400
Subject: [PATCH] updated to newer function calling

---
 autogen_functions/autogen_function_example.py |  2 +-
 autogen_functions/autogen_function_planner.py | 91 +++++++------------
 2 files changed, 35 insertions(+), 58 deletions(-)

diff --git a/autogen_functions/autogen_function_example.py b/autogen_functions/autogen_function_example.py
index 1343abe..bfa1aa4 100644
--- a/autogen_functions/autogen_function_example.py
+++ b/autogen_functions/autogen_function_example.py
@@ -3,7 +3,7 @@
 import autogen
 
 config_list = autogen.config_list_from_json(
-    env_or_file="OAI_CONFIG_LIST",
+    env_or_file="config/OAI_CONFIG_LIST.json",
     filter_dict={
         "model": ["gpt-4"]
     },
diff --git a/autogen_functions/autogen_function_planner.py b/autogen_functions/autogen_function_planner.py
index 2a60137..1d78c7d 100644
--- a/autogen_functions/autogen_function_planner.py
+++ b/autogen_functions/autogen_function_planner.py
@@ -1,61 +1,20 @@
 import autogen
+from typing import Annotated
 
-# config_list = autogen.config_list_from_json(
-#     env_or_file="OAI_CONFIG_LIST",
-#     filter_dict={
-#         "model": ["gpt-4"]
-#     },
-# )
-
-config_list = {
-    "model": "NULL",  # not needed
-    # NOTE: on versions of pyautogen < 0.2.0 use "api_base", and also uncomment "api_type"
-    # "api_base": "http://localhost:1234/v1",
-    # "api_type": "open_ai",
-    "base_url": "http://localhost:1234/v1",  # ex. "http://127.0.0.1:5001/v1" if you are using webui, "http://localhost:1234/v1/" if you are using LM Studio
-    "api_key": "NULL",  # not needed
-}
-
-# print(config_list)
+config_list = autogen.config_list_from_json(
+    env_or_file="config/OAI_CONFIG_LIST.json",
+    filter_dict={
+        "model": ["gpt-3.5-turbo"]
+    },
+)
 
 llm_config = {
     "temperature": 0,
     "timeout": 300,
-    "seed": 43,
-    "config_list": config_list,
-    "functions": [
-        {
-            "name": "ask_planner",
-            "description": "ask planner to: 1. get a plan for finishing a task, 2. verify the execution result of "
-                           "the plan and potentially suggest new plan.",
-            "parameters": {
-                "type": "object",
-                "properties": {
-                    "message": {
-                        "type": "string",
-                        "description": "question to ask planner. Make sure the question include enough context, "
-                                       "such as the code and the execution result. The planner does not know the "
-                                       "conversation between you and the user, unless you share the conversation "
-                                       "with the planner.",
-                    },
-                },
-                "required": ["message"],
-            },
-        },
-    ],
+    "seed": 44,
+    "config_list": config_list
 }
 
-
-def ask_planner(message):
-    planner_user.initiate_chat(planner, message=message)
-    # return the last message received from the planner
-    last_message = planner_user.last_message()["content"]
-    print("About to just get the first 10 characters of the message!")
-    print("Here is where you can do something with the message that we received.")
-    print(last_message[:10])
-    return planner_user.last_message()["content"]
-
-
 planner = autogen.AssistantAgent(
     name="planner",
     llm_config={
@@ -73,6 +32,7 @@ def ask_planner(message):
     name="planner_user",
     max_consecutive_auto_reply=0,  # terminate without auto-reply
     human_input_mode="NEVER",
+    code_execution_config={"work_dir": "planning", "use_docker": False},
 )
 
 # create an AssistantAgent instance named "assistant"
@@ -81,6 +41,12 @@ def ask_planner(message):
     name="assistant",
     llm_config=llm_config
 )
+# create an AssistantAgent instance named "assistant2"
+assistant2 = autogen.AssistantAgent(
+    name="assistant2",
+    llm_config=llm_config
+)
+
 # create a UserProxyAgent instance named "user_proxy"
 user_proxy = autogen.UserProxyAgent(
     name="user_proxy",
@@ -89,15 +55,26 @@ def ask_planner(message):
     is_termination_msg=lambda x: "content" in x and x["content"] is not None and x["content"].rstrip().endswith(
         "TERMINATE"),
     code_execution_config={"work_dir": "planning", "use_docker": False},
-    function_map={"ask_planner": ask_planner},
 )
 
-# can also register functions to an agent this way
-# user_proxy.register_function(
-#     function_map={
-#         "ask_planner": ask_planner
-#     }
-# )
+
+@user_proxy.register_for_execution()
+@assistant.register_for_llm(description="Get the first 10 characters of the message")
+def ask_planner(message: Annotated[str, "The response from the LLM"]) -> str:
+    planner_user.initiate_chat(planner, message=message)
+    # return the last message received from the planner
+    last_message = planner_user.last_message()["content"]
+    print("About to just get the first 10 characters of the message!")
+    print("Here is where you can do something with the message that we received.")
+    print(last_message[:10])
+    return planner_user.last_message()["content"]
+
+
+# @user_proxy.register_for_execution()
+# @assistant2.register_for_llm(description="Do something")
+# def do_something(message: Annotated[str, "The response from the LLM"]) -> str:
+#     return "Hello there"
+
 
 # the assistant receives a message from the user, which contains the task description
 user_proxy.initiate_chat(