Skip to content

Commit

Permalink
Remove Langchain::Thread class (#783)
Browse files Browse the repository at this point in the history
* wip

* update comments

* Add changelog entry

* fix specs

* fixes
  • Loading branch information
andreibondarev authored Sep 17, 2024
1 parent e5550e2 commit fdd5df2
Show file tree
Hide file tree
Showing 6 changed files with 133 additions and 180 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
## [Unreleased]
- Remove `Langchain::Thread` class as it was not needed.

## [0.15.6] - 2024-09-16
- Throw an error when `Langchain::Assistant#add_message_callback` is not a callable proc.
Expand Down
4 changes: 1 addition & 3 deletions examples/assistant_chat.rb
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,8 @@
# or add `gem "reline"` to your Gemfile

openai = Langchain::LLM::OpenAI.new(api_key: ENV["OPENAI_API_KEY"])
thread = Langchain::Thread.new
assistant = Langchain::Assistant.new(
llm: openai,
thread: thread,
instructions: "You are a Meteorologist Assistant that is able to pull the weather for any location",
tools: [
Langchain::Tool::Weather.new(api_key: ENV["OPEN_WEATHER_API_KEY"])
Expand Down Expand Up @@ -54,7 +52,7 @@ def prompt_for_message
end

assistant.add_message_and_run content: user_message, auto_tool_execution: true
puts assistant.thread.messages.last.content
puts assistant.messages.last.content
end
rescue Interrupt
exit 0
Expand Down
115 changes: 66 additions & 49 deletions lib/langchain/assistants/assistant.rb
Original file line number Diff line number Diff line change
Expand Up @@ -12,27 +12,23 @@ module Langchain
# tools: [Langchain::Tool::NewsRetriever.new(api_key: ENV["NEWS_API_KEY"])]
# )
class Assistant
extend Forwardable
def_delegators :thread, :messages

attr_reader :llm, :thread, :instructions, :state, :llm_adapter, :tool_choice
attr_reader :total_prompt_tokens, :total_completion_tokens, :total_tokens
attr_reader :llm, :instructions, :state, :llm_adapter, :tool_choice
attr_reader :total_prompt_tokens, :total_completion_tokens, :total_tokens, :messages
attr_accessor :tools, :add_message_callback

# Create a new assistant
#
# @param llm [Langchain::LLM::Base] LLM instance that the assistant will use
# @param thread [Langchain::Thread] The thread that'll keep track of the conversation
# @param tools [Array<Langchain::Tool::Base>] Tools that the assistant has access to
# @param instructions [String] The system instructions to include in the thread
# @param instructions [String] The system instructions
# @param tool_choice [String] Specify how tools should be selected. Options: "auto", "any", "none", or <specific function name>
# @param add_message_callback [Proc] A callback function (Proc or lambda) that is called when any message is added to the conversation
def initialize(
llm:,
thread: nil,
tools: [],
instructions: nil,
tool_choice: "auto",
messages: [],
add_message_callback: nil
)
unless tools.is_a?(Array) && tools.all? { |tool| tool.class.singleton_class.included_modules.include?(Langchain::ToolDefinition) }
Expand All @@ -42,14 +38,13 @@ def initialize(
@llm = llm
@llm_adapter = LLM::Adapter.build(llm)

@thread = thread || Langchain::Thread.new

# TODO: Validate that it is, indeed, a Proc or lambda
if !add_message_callback.nil? && !add_message_callback.respond_to?(:call)
raise ArgumentError, "add_message_callback must be a callable object, like Proc or lambda"
end
@thread.add_message_callback = add_message_callback
@add_message_callback = add_message_callback

self.messages = messages
@tools = tools
self.tool_choice = tool_choice
@instructions = instructions
Expand All @@ -59,41 +54,60 @@ def initialize(
@total_completion_tokens = 0
@total_tokens = 0

raise ArgumentError, "Thread must be an instance of Langchain::Thread" unless @thread.is_a?(Langchain::Thread)

# The first message in the thread should be the system instructions
# The first message in the messages array should be the system instructions
# For Google Gemini, and Anthropic system instructions are added to the `system:` param in the `chat` method
initialize_instructions
end

# Add a user message to the thread
# Add a user message to the messages array
#
# @param content [String] The content of the message
# @param role [String] The role attribute of the message. Default: "user"
# @param tool_calls [Array<Hash>] The tool calls to include in the message
# @param tool_call_id [String] The ID of the tool call to include in the message
# @return [Array<Langchain::Message>] The messages in the thread
# @return [Array<Langchain::Message>] The messages
def add_message(content: nil, role: "user", tool_calls: [], tool_call_id: nil)
message = build_message(role: role, content: content, tool_calls: tool_calls, tool_call_id: tool_call_id)
messages = thread.add_message(message)

# Call the callback with the message
add_message_callback.call(message) if add_message_callback # rubocop:disable Style/SafeNavigation

# Append the message to the messages array
messages << message

@state = :ready

messages
end

# Set multiple messages to the thread
# Convert messages to an LLM APIs-compatible array of hashes
#
# @param messages [Array<Hash>] The messages to set
# @return [Array<Langchain::Message>] The messages in the thread
# @return [Array<Hash>] Messages as an OpenAI API-compatible array of hashes
def array_of_message_hashes
messages
.map(&:to_hash)
.compact
end

# Only used by the Assistant when it calls the LLM#complete() method
def prompt_of_concatenated_messages
messages.map(&:to_s).join
end

# Set multiple messages
#
# @param messages [Array<Langchain::Message>] The messages to set
# @return [Array<Langchain::Message>] The messages
def messages=(messages)
clear_thread!
add_messages(messages: messages)
raise ArgumentError, "messages array must only contain Langchain::Message instance(s)" unless messages.is_a?(Array) && messages.all? { |m| m.is_a?(Langchain::Messages::Base) }

@messages = messages
end

# Add multiple messages to the thread
# Add multiple messages
#
# @param messages [Array<Hash>] The messages to add
# @return [Array<Langchain::Message>] The messages in the thread
# @return [Array<Langchain::Message>] The messages
def add_messages(messages:)
messages.each do |message_hash|
add_message(**message_hash.slice(:content, :role, :tool_calls, :tool_call_id))
Expand All @@ -103,77 +117,80 @@ def add_messages(messages:)
# Run the assistant
#
# @param auto_tool_execution [Boolean] Whether or not to automatically run tools
# @return [Array<Langchain::Message>] The messages in the thread
# @return [Array<Langchain::Message>] The messages
def run(auto_tool_execution: false)
if thread.messages.empty?
Langchain.logger.warn("No messages in the thread")
if messages.empty?
Langchain.logger.warn("No messages to process")
@state = :completed
return
end

@state = :in_progress
@state = handle_state until run_finished?(auto_tool_execution)

thread.messages
messages
end

# Run the assistant with automatic tool execution
#
# @return [Array<Langchain::Message>] The messages in the thread
# @return [Array<Langchain::Message>] The messages
def run!
run(auto_tool_execution: true)
end

# Add a user message to the thread and run the assistant
# Add a user message and run the assistant
#
# @param content [String] The content of the message
# @param auto_tool_execution [Boolean] Whether or not to automatically run tools
# @return [Array<Langchain::Message>] The messages in the thread
# @return [Array<Langchain::Message>] The messages
def add_message_and_run(content:, auto_tool_execution: false)
add_message(content: content, role: "user")
run(auto_tool_execution: auto_tool_execution)
end

# Add a user message to the thread and run the assistant with automatic tool execution
# Add a user message and run the assistant with automatic tool execution
#
# @param content [String] The content of the message
# @return [Array<Langchain::Message>] The messages in the thread
# @return [Array<Langchain::Message>] The messages
def add_message_and_run!(content:)
add_message_and_run(content: content, auto_tool_execution: true)
end

# Submit tool output to the thread
# Submit tool output
#
# @param tool_call_id [String] The ID of the tool call to submit output for
# @param output [String] The output of the tool
# @return [Array<Langchain::Message>] The messages in the thread
# @return [Array<Langchain::Message>] The messages
def submit_tool_output(tool_call_id:, output:)
tool_role = determine_tool_role

# TODO: Validate that `tool_call_id` is valid by scanning messages and checking if this tool call ID was invoked
add_message(role: tool_role, content: output, tool_call_id: tool_call_id)
end

# Delete all messages in the thread
# Delete all messages
#
# @return [Array] Empty messages array
def clear_thread!
def clear_messages!
# TODO: Is this a bug? Should we keep the "system" message?
thread.messages = []
@messages = []
end

# TODO: Remove in the next major release
alias_method :clear_thread!, :clear_messages!

# Set new instructions
#
# @param new_instructions [String] New instructions that will be set as a system message
# @return [Array<Langchain::Message>] The messages in the thread
# @return [Array<Langchain::Message>] The messages
def instructions=(new_instructions)
@instructions = new_instructions

# This only needs to be done for LLMs that support Message#@role="system"
if !llm.is_a?(Langchain::LLM::GoogleGemini) &&
!llm.is_a?(Langchain::LLM::GoogleVertexAI) &&
!llm.is_a?(Langchain::LLM::Anthropic)
# Find message with role: "system" in thread.messages and delete it from the thread.messages array
# Find message with role: "system" in messages and delete it from the messages array
replace_system_message!(content: new_instructions)
end
end
Expand All @@ -192,12 +209,12 @@ def tool_choice=(new_tool_choice)
# Replace old system message with new one
#
# @param content [String] New system message content
# @return [Array<Langchain::Message>] The messages in the thread
# @return [Array<Langchain::Message>] The messages
def replace_system_message!(content:)
thread.messages.delete_if(&:system?)
messages.delete_if(&:system?)

message = build_message(role: "system", content: content)
thread.messages.unshift(message)
messages.unshift(message)
end

# TODO: If tool_choice = "tool_function_name" and then tool is removed from the assistant, should we set tool_choice back to "auto"?
Expand Down Expand Up @@ -231,11 +248,11 @@ def handle_state
end
end

# Process the latest message in the thread
# Process the latest message
#
# @return [Symbol] The next state
def process_latest_message
last_message = thread.messages.last
last_message = messages.last

case last_message.standard_role
when :system
Expand All @@ -261,14 +278,14 @@ def handle_system_message
#
# @return [Symbol] The next state
def handle_llm_message
thread.messages.last.tool_calls.any? ? :requires_action : :completed
messages.last.tool_calls.any? ? :requires_action : :completed
end

# Handle unexpected message scenario
#
# @return [Symbol] The failed state
def handle_unexpected_message
Langchain.logger.error("Unexpected message role encountered: #{thread.messages.last.standard_role}")
Langchain.logger.error("Unexpected message role encountered: #{messages.last.standard_role}")
:failed
end

Expand Down Expand Up @@ -301,7 +318,7 @@ def set_state_for(response:)
#
# @return [Symbol] The next state
def execute_tools
run_tools(thread.messages.last.tool_calls)
run_tools(messages.last.tool_calls)
:in_progress
rescue => e
Langchain.logger.error("Error running tools: #{e.message}; #{e.backtrace.join('\n')}")
Expand Down Expand Up @@ -340,7 +357,7 @@ def chat_with_llm

params = @llm_adapter.build_chat_params(
instructions: @instructions,
messages: thread.array_of_message_hashes,
messages: array_of_message_hashes,
tools: @tools,
tool_choice: tool_choice
)
Expand Down
46 changes: 0 additions & 46 deletions lib/langchain/assistants/thread.rb

This file was deleted.

Loading

0 comments on commit fdd5df2

Please sign in to comment.