From e3ae1800e2f23dbc16ab7487b144d33941edf1cb Mon Sep 17 00:00:00 2001
From: Aymeric Roucher <69208727+aymeric-roucher@users.noreply.github.com>
Date: Fri, 17 Jan 2025 20:20:28 +0100
Subject: [PATCH] Fixes before release (#254)

* Fixes before release
---
 docs/source/en/reference/agents.md | 11 ++++++-----
 docs/source/zh/guided_tour.md      |  4 ++--
 docs/source/zh/reference/agents.md |  4 ++--
 examples/tool_calling_agent_mcp.py |  4 ++--
 pyproject.toml                     |  3 ++-
 src/smolagents/e2b_executor.py     |  2 +-
 tests/test_all_docs.py             |  2 +-
 7 files changed, 16 insertions(+), 14 deletions(-)

diff --git a/docs/source/en/reference/agents.md b/docs/source/en/reference/agents.md
index 802f19d..76b2ecb 100644
--- a/docs/source/en/reference/agents.md
+++ b/docs/source/en/reference/agents.md
@@ -136,8 +136,8 @@ messages = [
     {"role": "user", "content": "No need to help, take it easy."},
 ]
 
-model = LiteLLMModel("anthropic/claude-3-5-sonnet-latest", temperature=0.2)
-print(model(messages, max_tokens=10))
+model = LiteLLMModel("anthropic/claude-3-5-sonnet-latest", temperature=0.2, max_tokens=10)
+print(model(messages))
 ```
 
 [[autodoc]] LiteLLMModel
@@ -145,12 +145,13 @@ print(model(messages, max_tokens=10))
 ### OpenAiServerModel
 
 This class lets you call any OpenAIServer compatible model.
-Here's how you can set it:
+Here's how you can set it (you can customise the `api_base` url to point to another server):
 ```py
+from smolagents import OpenAIServerModel
+
 model = OpenAIServerModel(
     model_id="gpt-4o",
-    base_url="https://api.openai.com/v1",
+    api_base="https://api.openai.com/v1",
     api_key=os.environ["OPENAI_API_KEY"],
 )
-model=LiteLLMModel("gpt-4o", api_key=os.environ["OPENAI_API_KEY"])
 ```
\ No newline at end of file
diff --git a/docs/source/zh/guided_tour.md b/docs/source/zh/guided_tour.md
index 07988fe..9816a4f 100644
--- a/docs/source/zh/guided_tour.md
+++ b/docs/source/zh/guided_tour.md
@@ -173,9 +173,9 @@ Transformers 附带了一个用于增强 agent 的默认工具箱，您可以在
 您可以通过调用 [`load_tool`] 函数和要执行的任务手动使用工具。
 
 ```python
-from smolagents import load_tool
+from smolagents import DuckDuckGoSearchTool
 
-search_tool = load_tool("web_search")
+search_tool = DuckDuckGoSearchTool()
 print(search_tool("Who's the current president of Russia?"))
 ```
 
diff --git a/docs/source/zh/reference/agents.md b/docs/source/zh/reference/agents.md
index 9cdca7d..dc011d3 100644
--- a/docs/source/zh/reference/agents.md
+++ b/docs/source/zh/reference/agents.md
@@ -136,8 +136,8 @@ messages = [
     {"role": "user", "content": "No need to help, take it easy."},
 ]
 
-model = LiteLLMModel("anthropic/claude-3-5-sonnet-latest", temperature=0.2)
-print(model(messages, max_tokens=10))
+model = LiteLLMModel("anthropic/claude-3-5-sonnet-latest", temperature=0.2, max_tokens=10)
+print(model(messages))
 ```
 
 [[autodoc]] LiteLLMModel
\ No newline at end of file
diff --git a/examples/tool_calling_agent_mcp.py b/examples/tool_calling_agent_mcp.py
index da73c46..c0e613a 100644
--- a/examples/tool_calling_agent_mcp.py
+++ b/examples/tool_calling_agent_mcp.py
@@ -23,5 +23,5 @@ mcp_server_params = StdioServerParameters(
 
 with ToolCollection.from_mcp(mcp_server_params) as tool_collection:
     # print(tool_collection.tools[0](request={"term": "efficient treatment hangover"}))
-    agent = CodeAgent(tools=tool_collection.tools, model=HfApiModel())
-    agent.run("Find studies about hangover?")
+    agent = CodeAgent(tools=tool_collection.tools, model=HfApiModel(), max_steps=4)
+    agent.run("Find me one risk associated with drinking alcohol regularly on low doses for humans.")
diff --git a/pyproject.toml b/pyproject.toml
index 2a8c960..e3ff96d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -37,7 +37,8 @@ litellm = [
   "litellm>=1.55.10",
 ]
 mcp = [
-  "mcpadapt>=0.0.6"
+  "mcpadapt>=0.0.6",
+  "mcp",
 ]
 openai = [
   "openai>=1.58.1"
diff --git a/src/smolagents/e2b_executor.py b/src/smolagents/e2b_executor.py
index e8cc893..8a20a9e 100644
--- a/src/smolagents/e2b_executor.py
+++ b/src/smolagents/e2b_executor.py
@@ -43,7 +43,7 @@ class E2BExecutor:
         # )
         # print("Installation of agents package finished.")
         self.logger = logger
-        additional_imports = additional_imports + ["pickle5"]
+        additional_imports = additional_imports + ["pickle5", "smolagents"]
         if len(additional_imports) > 0:
             execution = self.sbx.commands.run(
                 "pip install " + " ".join(additional_imports)
diff --git a/tests/test_all_docs.py b/tests/test_all_docs.py
index ae5e8cb..d1adabd 100644
--- a/tests/test_all_docs.py
+++ b/tests/test_all_docs.py
@@ -85,7 +85,7 @@ class TestDocs:
     def setup_class(cls):
         cls._tmpdir = tempfile.mkdtemp()
         cls.launch_args = ["python3"]
-        cls.docs_dir = Path(__file__).parent.parent / "docs" / "source"
+        cls.docs_dir = Path(__file__).parent.parent / "docs" / "source" / "en"
         cls.extractor = DocCodeExtractor()
 
         if not cls.docs_dir.exists():