From 49ad16833694b27d710fced59a2720c6a2a0b257 Mon Sep 17 00:00:00 2001 From: Aidan Do Date: Fri, 3 Jan 2025 04:21:35 +1100 Subject: [PATCH] [#407] Agents: Avoid calling tools that haven't been explicitly enabled (#637) # What does this PR do? Contributes to issue (#407) tl;dr - @subramen was getting a 500 error because llama-stack called code_interpreter when it never was defined as a tool. Prevents failures like: image ``` # Server side Traceback (most recent call last): File "/opt/conda/envs/llamastack-vllm-stack/lib/python3.10/site-packages/llama_stack/distribution/server/server.py", line 206, in sse_generator async for item in await event_gen: File "/opt/conda/envs/llamastack-vllm-stack/lib/python3.10/site-packages/llama_stack/providers/impls/meta_reference/agents/agents.py", line 138, in _create_agent_turn_streaming async for event in agent.create_and_execute_turn(request): File "/opt/conda/envs/llamastack-vllm-stack/lib/python3.10/site-packages/llama_stack/providers/impls/meta_reference/agents/agent_instance.py", line 179, in create_and_execute_turn async for chunk in self.run( File "/opt/conda/envs/llamastack-vllm-stack/lib/python3.10/site-packages/llama_stack/providers/impls/meta_reference/agents/agent_instance.py", line 252, in run async for res in self._run( File "/opt/conda/envs/llamastack-vllm-stack/lib/python3.10/site-packages/llama_stack/providers/impls/meta_reference/agents/agent_instance.py", line 560, in _run result_messages = await execute_tool_call_maybe( File "/opt/conda/envs/llamastack-vllm-stack/lib/python3.10/site-packages/llama_stack/providers/impls/meta_reference/agents/agent_instance.py", line 824, in execute_tool_call_maybe assert name in tools_dict, f"Tool {name} not found" AssertionError: Tool code_interpreter not found ``` Instead, if the model hallucinates, we just let it hallucinate and let the client know. image ## Test Plan
pytest llama_stack/providers/tests/agents/test_agents.py -k ollama ``` llama stack build --template ollama --image-type conda conda activate llamastack-ollama ``` ``` llama_stack/providers/tests/agents/test_agents.py ..Fss [100%] ======================================================================= FAILURES ======================================================================= _________________________________________ TestAgents.test_rag_agent_as_attachments[--ollama][ollama] __________________________________________ llama_stack/providers/tests/agents/test_agents.py:261: in test_rag_agent_as_attachments turn_response = [ llama_stack/providers/tests/agents/test_agents.py:261: in turn_response = [ llama_stack/providers/inline/agents/meta_reference/agents.py:153: in _create_agent_turn_streaming async for event in agent.create_and_execute_turn(request): llama_stack/providers/inline/agents/meta_reference/agent_instance.py:179: in create_and_execute_turn async for chunk in self.run( llama_stack/providers/inline/agents/meta_reference/agent_instance.py:250: in run async for res in self._run( llama_stack/providers/inline/agents/meta_reference/agent_instance.py:363: in _run rag_context, bank_ids = await self._retrieve_context( llama_stack/providers/inline/agents/meta_reference/agent_instance.py:698: in _retrieve_context bank_id = await self._ensure_memory_bank(session_id) llama_stack/providers/inline/agents/meta_reference/agent_instance.py:653: in _ensure_memory_bank await self.memory_banks_api.register_memory_bank( llama_stack/providers/utils/telemetry/trace_protocol.py:101: in async_wrapper result = await method(self, *args, **kwargs) llama_stack/distribution/routers/routing_tables.py:312: in register_memory_bank raise ValueError( E ValueError: Embeddings are now served via Inference providers. Please upgrade your run.yaml to include inline::sentence-transformer as an additional inference provider. 
See https://github.com/meta-llama/llama-stack/blob/main/llama_stack/templates/together/run.yaml for an example. =============================================================== short test summary info ================================================================ FAILED llama_stack/providers/tests/agents/test_agents.py::TestAgents::test_rag_agent_as_attachments[--ollama] - ValueError: Embeddings are now served via Inference providers. Please upgrade your run.yaml to include inline::sentence-transformer as an additiona... ========================================== 1 failed, 2 passed, 2 skipped, 20 deselected, 5 warnings in 14.24s ========================================== ``` Unrelated test is failing (also failing on main)
Manual testing: using this client code: https://github.com/aidando73/llama-stack-apps/blob/7ebc257b27bb120fe13e11d9d668a467a33e137d/client.py Screenshot 2024-12-16 at 17 41 31
## Before submitting - [ ] This PR fixes a typo or improves the docs (you can dismiss the other checks if that's the case). - [x] Ran pre-commit to handle lint / formatting issues. - [x] Read the [contributor guideline](https://github.com/meta-llama/llama-stack/blob/main/CONTRIBUTING.md), Pull Request section? - [ ] Updated relevant documentation. - [ ] Wrote necessary unit or integration tests. --- .../providers/inline/agents/meta_reference/agent_instance.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/llama_stack/providers/inline/agents/meta_reference/agent_instance.py b/llama_stack/providers/inline/agents/meta_reference/agent_instance.py index f225f53939..09738d7b7d 100644 --- a/llama_stack/providers/inline/agents/meta_reference/agent_instance.py +++ b/llama_stack/providers/inline/agents/meta_reference/agent_instance.py @@ -584,7 +584,7 @@ async def _run( tool_call = message.tool_calls[0] name = tool_call.tool_name - if not isinstance(name, BuiltinTool): + if not isinstance(name, BuiltinTool) or name not in enabled_tools: yield message return