Skip to content

Commit

Permalink
Change to anthropic claude client for bedrock
Browse files Browse the repository at this point in the history
  • Loading branch information
dirkbrnd committed Feb 8, 2025
1 parent 9084437 commit 11d998b
Show file tree
Hide file tree
Showing 32 changed files with 154 additions and 365 deletions.
File renamed without changes.
15 changes: 15 additions & 0 deletions cookbook/models/aws_bedrock/claude/async_basic.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
import asyncio

from agno.agent import Agent, RunResponse  # noqa
from agno.models.aws.claude import Claude

# Build an agent backed by the Bedrock-hosted Claude 3.5 Sonnet model,
# with markdown rendering enabled for terminal output.
bedrock_agent = Agent(
    model=Claude(id="anthropic.claude-3-5-sonnet-20240620-v1:0"), markdown=True
)

# To capture the response in a variable instead of printing it:
# result: RunResponse = bedrock_agent.run("Share a 2 sentence horror story")
# print(result.content)

# Run the async helper that prints the model's reply to the terminal.
asyncio.run(bedrock_agent.aprint_response("Share a 2 sentence horror story"))
16 changes: 16 additions & 0 deletions cookbook/models/aws_bedrock/claude/async_basic_stream.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
import asyncio
from typing import Iterator  # noqa

from agno.agent import Agent, RunResponse  # noqa
from agno.models.aws.claude import Claude

# Agent wired to the Bedrock-hosted Claude 3.5 Sonnet model; markdown=True
# formats the streamed chunks for terminal display.
bedrock_agent = Agent(
    model=Claude(id="anthropic.claude-3-5-sonnet-20240620-v1:0"), markdown=True
)

# To consume the stream programmatically instead of printing it:
# chunks: Iterator[RunResponse] = bedrock_agent.run("Share a 2 sentence horror story", stream=True)
# for piece in chunks:
#     print(piece.content)

# Stream the model's reply chunk-by-chunk straight to the terminal.
asyncio.run(bedrock_agent.aprint_response("Share a 2 sentence horror story", stream=True))
File renamed without changes.
9 changes: 6 additions & 3 deletions cookbook/models/huggingface/async_basic.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import asyncio

from agno.agent import Agent
from agno.models.huggingface import HuggingFace

Expand All @@ -7,6 +8,8 @@
id="mistralai/Mistral-7B-Instruct-v0.2", max_tokens=4096, temperature=0
),
)
asyncio.run(agent.aprint_response(
"What is meaning of life and then recommend 5 best books to read about it"
))
asyncio.run(
agent.aprint_response(
"What is meaning of life and then recommend 5 best books to read about it"
)
)
11 changes: 7 additions & 4 deletions cookbook/models/huggingface/async_basic_stream.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import asyncio

from agno.agent import Agent
from agno.models.huggingface import HuggingFace

Expand All @@ -7,7 +8,9 @@
id="mistralai/Mistral-7B-Instruct-v0.2", max_tokens=4096, temperature=0
),
)
asyncio.run(agent.aprint_response(
"What is meaning of life and then recommend 5 best books to read about it",
stream=True,
))
asyncio.run(
agent.aprint_response(
"What is meaning of life and then recommend 5 best books to read about it",
stream=True,
)
)
1 change: 1 addition & 0 deletions cookbook/models/openai/async_basic.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import asyncio

from agno.agent import Agent, RunResponse # noqa
from agno.models.openai import OpenAIChat

Expand Down
1 change: 1 addition & 0 deletions cookbook/models/openai/async_basic_stream.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import asyncio
from typing import Iterator # noqa

from agno.agent import Agent, RunResponse # noqa
from agno.models.openai import OpenAIChat

Expand Down
14 changes: 5 additions & 9 deletions libs/agno/agno/agent/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
from pydantic import BaseModel

from agno.agent.metrics import SessionMetrics
from agno.exceptions import AgentRunException, ModelProviderError, StopAgentRun
from agno.exceptions import ModelProviderError, StopAgentRun
from agno.knowledge.agent import AgentKnowledge
from agno.media import Audio, AudioArtifact, Image, ImageArtifact, Video, VideoArtifact
from agno.memory.agent import AgentMemory, AgentRun
Expand Down Expand Up @@ -630,9 +630,7 @@ def _run(
# 9. Update Agent Memory
# Add the system message to the memory
if run_messages.system_message is not None:
self.memory.add_system_message(
run_messages.system_message, system_message_role=self.system_message_role
)
self.memory.add_system_message(run_messages.system_message, system_message_role=self.system_message_role)

# Build a list of messages that should be added to the AgentMemory
messages_for_memory: List[Message] = (
Expand Down Expand Up @@ -1062,9 +1060,7 @@ async def _arun(
# 9. Update Agent Memory
# Add the system message to the memory
if run_messages.system_message is not None:
self.memory.add_system_message(
run_messages.system_message, system_message_role=self.system_message_role
)
self.memory.add_system_message(run_messages.system_message, system_message_role=self.system_message_role)

# Build a list of messages that should be added to the AgentMemory
messages_for_memory: List[Message] = (
Expand Down Expand Up @@ -1829,7 +1825,7 @@ def get_system_message(self) -> Optional[Message]:
if self.response_model is not None and not self.structured_outputs:
sys_message_content += f"\n{self.get_json_output_prompt()}"

return Message(role=self.system_message_role, content=sys_message_content)
return Message(role=self.system_message_role, content=sys_message_content) # type: ignore

# 2. If create_default_system_message is False, return None.
if not self.create_default_system_message:
Expand Down Expand Up @@ -1971,7 +1967,7 @@ def get_system_message(self) -> Optional[Message]:

# Return the system message
return (
Message(role=self.system_message_role, content=system_message_content.strip())
Message(role=self.system_message_role, content=system_message_content.strip()) # type: ignore
if system_message_content
else None
)
Expand Down
8 changes: 2 additions & 6 deletions libs/agno/agno/embedder/huggingface.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from agno.utils.log import logger

try:
from huggingface_hub import InferenceClient, SentenceSimilarityInput
from huggingface_hub import InferenceClient
except ImportError:
logger.error("`huggingface-hub` not installed, please run `pip install huggingface-hub`")
raise
Expand All @@ -34,11 +34,7 @@ def client(self) -> InferenceClient:
return InferenceClient(**_client_params)

def _response(self, text: str):
_request_params: SentenceSimilarityInput = {
"json": {"inputs": text},
"model": self.id,
}
return self.client.post(**_request_params)
return self.client.post(json={"inputs": text}, model=self.id)

def get_embedding(self, text: str) -> List[float]:
response = self._response(text=text)
Expand Down
8 changes: 4 additions & 4 deletions libs/agno/agno/models/anthropic/claude.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
from collections.abc import AsyncIterator
import json
from collections.abc import AsyncIterator
from dataclasses import dataclass
from os import getenv
from typing import Any, Dict, Iterator, List, Optional, Tuple, Union
from typing import Any, Dict, List, Optional, Tuple, Union

from agno.exceptions import ModelProviderError
from agno.media import Image
Expand Down Expand Up @@ -35,6 +35,7 @@
"tool": "user",
}


def _format_image_for_message(image: Image) -> Optional[Dict[str, Any]]:
"""
Add an image to a message by converting it to base64 encoded format.
Expand Down Expand Up @@ -137,8 +138,8 @@ def _format_messages(messages: List[Message]) -> Tuple[List[Dict[str, str]], str
else {},
name=tool_call["function"]["name"],
type="tool_use",
)
)
)

chat_messages.append({"role": ROLE_MAP[message.role], "content": content}) # type: ignore
return chat_messages, " ".join(system_messages)
Expand Down Expand Up @@ -172,7 +173,6 @@ class Claude(Model):
client: Optional[AnthropicClient] = None
async_client: Optional[AsyncAnthropicClient] = None


def _get_client_params(self) -> Dict[str, Any]:
client_params: Dict[str, Any] = {}

Expand Down
Loading

0 comments on commit 11d998b

Please sign in to comment.