Commit

Fix mypy errors
jackmpcollins committed Nov 30, 2024
1 parent cda92aa commit 0f1facb
Showing 4 changed files with 7 additions and 7 deletions.
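The fixes follow three recurring mypy patterns visible in the diffs below: a bare generic gets an explicit type parameter (FunctionCall becomes FunctionCall[Any]), a variable bound to a generator expression gets an explicit annotation so its element type is pinned (tool_calls_stream), and test helpers gain a -> None return annotation. A minimal standalone sketch of these patterns follows; it assumes typical strict-mode flags, which may not match this repository's mypy configuration, and the FunctionCall class is an illustrative stand-in rather than the magentic implementation.

from typing import Any, Generic, Iterator, TypeVar

T = TypeVar("T")


class FunctionCall(Generic[T]):
    """Illustrative stand-in for magentic's generic FunctionCall type."""


# Pattern 1: bare generics are rejected under --disallow-any-generics;
# spell out the type parameter explicitly.
function_calls: list[FunctionCall[Any]] = []

# Pattern 2: annotating the variable pins the element type of a
# generator expression for mypy.
chunks: Iterator[int] = (n for n in range(3))


# Pattern 3: --disallow-untyped-defs reports a def with no return
# annotation; an explicit -> None resolves it.
def get_weather(location: str) -> None:
    """Get the weather for a location."""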
2 changes: 1 addition & 1 deletion src/magentic/chat_model/openai_chat_model.py
@@ -157,7 +157,7 @@ def _(message: AssistantMessage[Any]) -> ChatCompletionMessageParam:

     if isinstance(message.content, StreamedResponse):
         content: list[str] = []
-        function_calls: list[FunctionCall] = []
+        function_calls: list[FunctionCall[Any]] = []
         for item in message.content:
             if isinstance(item, StreamedStr):
                 content.append(str(item))
4 changes: 2 additions & 2 deletions src/magentic/chat_model/stream.py
@@ -130,7 +130,7 @@ def __stream__(self) -> Iterator[StreamedStr | OutputT]:
             # Finish the group to allow advancing to the next one
             consume(self._streamed_str(stream, current_item_ref))
         elif self._parser.is_tool_call(current_item):
-            tool_calls_stream = (
+            tool_calls_stream: Iterator[FunctionCallChunk] = (
                 tool_call_chunk
                 for item in chain([current_item], stream)
                 for tool_call_chunk in self._parser.iter_tool_calls(item)
@@ -247,7 +247,7 @@ async def __stream__(self) -> AsyncIterator[AsyncStreamedStr | OutputT]:
             if not current_item_ref:
                 await aconsume(self._streamed_str(stream, current_item_ref))
         elif self._parser.is_tool_call(current_item):
-            tool_calls_stream = (
+            tool_calls_stream: AsyncIterator[FunctionCallChunk] = (
                 tool_call_chunk
                 async for item in achain(async_iter([current_item]), stream)
                 for tool_call_chunk in self._parser.iter_tool_calls(item)
4 changes: 2 additions & 2 deletions tests/chat_model/test_anthropic_chat_model.py
@@ -126,7 +126,7 @@ def minus(a: int, b: int) -> int:

 @pytest.mark.anthropic
 def test_anthropic_chat_model_complete_streamed_response():
-    def get_weather(location: str):
+    def get_weather(location: str) -> None:
         """Get the weather for a location."""

     chat_model = AnthropicChatModel("claude-3-opus-20240229")
@@ -243,7 +243,7 @@ def minus(a: int, b: int) -> int:

 @pytest.mark.anthropic
 async def test_anthropic_chat_model_acomplete_async_streamed_response():
-    def get_weather(location: str):
+    def get_weather(location: str) -> None:
         """Get the weather for a location."""

     chat_model = AnthropicChatModel("claude-3-opus-20240229")
4 changes: 2 additions & 2 deletions tests/chat_model/test_openai_chat_model.py
@@ -174,7 +174,7 @@ def test_openai_chat_model_complete_seed():

 @pytest.mark.openai
 def test_openai_chat_model_complete_streamed_response():
-    def get_weather(location: str):
+    def get_weather(location: str) -> None:
         """Get the weather for a location."""

     chat_model = OpenaiChatModel("gpt-4o")
@@ -276,7 +276,7 @@ class Test(BaseModel):

 @pytest.mark.openai
 async def test_openai_chat_model_acomplete_async_streamed_response():
-    def get_weather(location: str):
+    def get_weather(location: str) -> None:
         """Get the weather for a location."""

     chat_model = OpenaiChatModel("gpt-4o")
