Forked from tylerprogramming/ai
Commit 87bef1d (1 parent: 2788da3), showing 7 changed files with 175 additions and 0 deletions.
OAI_CONFIG_LIST.json
@@ -0,0 +1,6 @@
[
    {
        "model": "gpt-3.5-turbo",
        "api_key": "sk-1111"
    }
]
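A minimal sketch of how this config file is consumed (assuming the same pyautogen version as the teaching script below, whose llm_config uses the identical call; the sk-1111 key is clearly a placeholder):

import autogen

# Load the model/key entries from OAI_CONFIG_LIST.json and keep only the
# gpt-3.5-turbo entry, mirroring the filter_dict used in the script below.
config_list = autogen.config_list_from_json(
    "OAI_CONFIG_LIST.json",
    filter_dict={"model": ["gpt-3.5-turbo"]},
)
print(config_list)  # expected: [{'model': 'gpt-3.5-turbo', 'api_key': 'sk-1111'}]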
@@ -0,0 +1,85 @@
# imports
from typing import Annotated
import autogen

# define the tasks
task1 = """
Find arxiv papers that show how people are studying trust calibration in AI-based systems
"""
task2 = """
Analyze the above results to list the application domains studied by these papers.
"""
task3 = """
Use this data to generate a bar chart of domains and the number of papers in each domain, and save it to a file.
"""
task4 = """
Reflect on the sequence and create a recipe containing all the steps
necessary, and a name for it. Suggest well-documented, generalized python function(s)
to perform similar tasks for coding steps in the future. Make sure coding steps and
non-coding steps are never mixed in one function. In the docstring of the function(s),
clarify what non-coding steps are needed to use the language skill of the assistant.
"""

# create the llm config
llm_config = {
    "timeout": 120,
    "cache_seed": 43,
    "config_list": autogen.config_list_from_json(
        "OAI_CONFIG_LIST.json",
        filter_dict={"model": ["gpt-3.5-turbo"]},
    ),
    "temperature": 0,
}

# create the agents
assistant = autogen.AssistantAgent(
    name="Assistant",
    llm_config=llm_config,
    is_termination_msg=lambda x: "TERMINATE" in (x.get("content") or "")
)

assistant_create_recipe = autogen.AssistantAgent(
    name="Recipe Assistant",
    llm_config=llm_config,
    is_termination_msg=lambda x: "TERMINATE" in (x.get("content") or "")
)

user = autogen.UserProxyAgent(
    name="User",
    human_input_mode="NEVER",
    max_consecutive_auto_reply=10,
    code_execution_config={
        "work_dir": "teaching",
        "use_docker": False
    }
)

# register a simple tool: the recipe assistant may call it, the user proxy executes it
@user.register_for_execution()
@assistant_create_recipe.register_for_llm(description="Save the generated recipe to a file.")
def save_recipe(recipe: Annotated[str, "The recipe text to save"]) -> str:
    with open('new_recipe.txt', 'w') as file:
        file.write(recipe)
    return recipe

# initiate the chats
user.initiate_chat(assistant, message=task1)
user.initiate_chat(assistant, message=task2, clear_history=False)
user.initiate_chat(assistant, message=task3, clear_history=False)
user.initiate_chat(assistant_create_recipe, message=task4, clear_history=False)

# initiate a chat with the recipe read back from the file
# with open('./new_recipe.txt', 'r') as file:
#     file_content = file.read()
#
# user.initiate_chat(assistant, message=file_content)
generate_bar_chart.py
@@ -0,0 +1,32 @@
# filename: generate_bar_chart.py
import matplotlib.pyplot as plt

# Data for application domains and number of papers
domains = [
    "AI-assisted decision-making",
    "User trust in AI",
    "Human-AI collaboration",
    "Human-robot teams",
    "Explainable robots",
    "Model calibration",
    "Trust explanations",
    "HCI",
    "Team cohesion",
    "Anthropomorphic agents"
]

num_papers = [2, 1, 2, 1, 1, 1, 1, 1, 1, 1]

# Create a bar chart
plt.figure(figsize=(12, 6))
plt.bar(domains, num_papers, color='skyblue')
plt.xlabel('Application Domains')
plt.ylabel('Number of Papers')
plt.title('Distribution of Papers Across Application Domains')
plt.xticks(rotation=45, ha='right')
plt.tight_layout()

# Save the bar chart to a file
plt.savefig('papers_distribution.png')

plt.show()
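If this script runs headlessly (for example inside the user proxy's teaching work_dir), plt.show() has no display to attach to. A minimal sketch, assuming matplotlib's standard backend-selection mechanism, that forces the non-interactive Agg backend so only the saved PNG is produced:

import matplotlib
matplotlib.use("Agg")  # select the non-interactive backend before importing pyplot
import matplotlib.pyplot as plt

# ... build the bar chart exactly as above ...
# plt.savefig('papers_distribution.png') still writes the file,
# and plt.show() no longer blocks (it only warns under Agg).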
This file cannot be displayed.
autogen_teaching/teaching/gpt_application_domains_chart.py (34 additions, 0 deletions)
@@ -0,0 +1,34 @@
# filename: gpt_application_domains_chart.py

import matplotlib.pyplot as plt

# Define the application domains for GPT models
domains_gpt = [
    "Natural Language Understanding",
    "Text Generation",
    "Language Translation",
    "Conversational Agents",
    "Question Answering",
    "Summarization",
    "Language Modeling",
    "Sentiment Analysis",
    "Information Retrieval",
    "Speech Recognition"
]

num_papers_gpt = [25, 20, 15, 10, 8, 7, 6, 5, 4, 3]

# Create a bar chart for GPT model application domains
plt.figure(figsize=(12, 6))
plt.bar(domains_gpt, num_papers_gpt, color='lightcoral')
plt.xlabel('Application Domains')
plt.ylabel('Number of Papers')
plt.title('Distribution of Papers Across Application Domains for GPT Models')
plt.xticks(rotation=45, ha='right')
plt.tight_layout()

# Save the bar chart as an image file
plt.savefig('gpt_application_domains_chart.png')

# Display the bar chart
plt.show()
This file cannot be displayed.
search_arxiv_papers.py
@@ -0,0 +1,18 @@
# filename: search_arxiv_papers.py
import requests
from xml.etree import ElementTree as ET

# Define the search query
query = "trust calibration AI"

# Make a request to the arXiv API
url = f"http://export.arxiv.org/api/query?search_query=all:{query}&max_results=10"
response = requests.get(url)

# Parse the XML response to extract paper titles and links
root = ET.fromstring(response.content)

for entry in root.findall('{http://www.w3.org/2005/Atom}entry'):
    title = entry.find('{http://www.w3.org/2005/Atom}title').text
    link = entry.find('{http://www.w3.org/2005/Atom}id').text
    print(f"Title: {title}\nLink: {link}\n")