diff --git a/examples/clients/simple-chatbot/mcp_simple_chatbot/main.py b/examples/clients/simple-chatbot/mcp_simple_chatbot/main.py
index a06e593bd..b939b6e5b 100644
--- a/examples/clients/simple-chatbot/mcp_simple_chatbot/main.py
+++ b/examples/clients/simple-chatbot/mcp_simple_chatbot/main.py
@@ -245,7 +245,7 @@ def get_response(self, messages: list[dict[str, str]]) -> str:
         }
         payload = {
             "messages": messages,
-            "model": "llama-3.2-90b-vision-preview",
+            "model": "meta-llama/llama-4-scout-17b-16e-instruct",
             "temperature": 0.7,
             "max_tokens": 4096,
             "top_p": 1,
@@ -323,8 +323,7 @@ async def process_llm_response(self, llm_response: str) -> str:
                                 total = result["total"]
                                 percentage = (progress / total) * 100
                                 logging.info(
-                                    f"Progress: {progress}/{total} "
-                                    f"({percentage:.1f}%)"
+                                    f"Progress: {progress}/{total} ({percentage:.1f}%)"
                                 )

                             return f"Tool execution result: {result}"
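
The payload touched by the first hunk is posted to an OpenAI-compatible chat-completions API. Below is a minimal standalone sketch of that request with the updated model id; the Groq-style endpoint URL, the `LLM_API_KEY` environment variable, and the response parsing are illustrative assumptions and are not part of this diff.

```python
# Illustrative only; not part of the diff. Assumes an OpenAI-compatible
# chat-completions endpoint and an LLM_API_KEY environment variable.
import os

import requests

url = "https://api.groq.com/openai/v1/chat/completions"  # assumed endpoint
headers = {
    "Content-Type": "application/json",
    "Authorization": f"Bearer {os.environ['LLM_API_KEY']}",  # assumed key name
}
payload = {
    "messages": [{"role": "user", "content": "Hello"}],
    "model": "meta-llama/llama-4-scout-17b-16e-instruct",  # new model id from this change
    "temperature": 0.7,
    "max_tokens": 4096,
    "top_p": 1,
}

resp = requests.post(url, headers=headers, json=payload, timeout=30)
resp.raise_for_status()
# OpenAI-compatible response shape: content of the first choice's message
print(resp.json()["choices"][0]["message"]["content"])
```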