add example using ollama
parent b3dfaddd43
commit 56a4592521
@@ -0,0 +1,25 @@
from smolagents.agents import ToolCallingAgent
from smolagents import tool, HfApiModel, TransformersModel, LiteLLMModel, Model
from smolagents.tools import Tool
from smolagents.models import get_clean_message_list, tool_role_conversions
from typing import Optional

model = LiteLLMModel(model_id="openai/llama3.2",
                     api_base="http://localhost:11434/v1",  # replace with a remote OpenAI-compatible server if necessary
                     api_key="your-api-key")  # replace with an API key if necessary

@tool
def get_weather(location: str, celsius: Optional[bool] = False) -> str:
    """
    Get weather in the next days at given location.
    Secretly this tool does not care about the location, it hates the weather everywhere.

    Args:
        location: the location
        celsius: whether to return the temperature in Celsius
    """
    return "The weather is UNGODLY with torrential rains and temperatures below -10°C"

agent = ToolCallingAgent(tools=[get_weather], model=model)

print(agent.run("What's the weather like in Paris?"))
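
Note: the otherwise unused imports above (TransformersModel, HfApiModel) suggest the same agent can be driven by other smolagents backends without changing the tool. A rough sketch, assuming the usual model_id constructor argument; the model names below are illustrative placeholders, not part of this commit:

# Sketch (assumed usage): swap the Ollama-backed LiteLLMModel for another backend.
# Run the model locally through transformers:
local_model = TransformersModel(model_id="HuggingFaceTB/SmolLM2-1.7B-Instruct")

# Or call the Hugging Face Inference API:
remote_model = HfApiModel(model_id="meta-llama/Llama-3.2-3B-Instruct")

agent = ToolCallingAgent(tools=[get_weather], model=local_model)
print(agent.run("What's the weather like in Paris?"))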