commit e3ae1800e2 (parent a4d029da88)
@@ -136,8 +136,8 @@ messages = [
   {"role": "user", "content": "No need to help, take it easy."},
 ]
 
-model = LiteLLMModel("anthropic/claude-3-5-sonnet-latest", temperature=0.2)
-print(model(messages, max_tokens=10))
+model = LiteLLMModel("anthropic/claude-3-5-sonnet-latest", temperature=0.2, max_tokens=10)
+print(model(messages))
 ```
 
 [[autodoc]] LiteLLMModel
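Note on the hunk above: generation settings such as `temperature` and `max_tokens` now go to the `LiteLLMModel` constructor and are reused on every call instead of being passed per call. A minimal sketch of that configure-once pattern, assuming only that constructor kwargs are stored and merged into each request (the class below is illustrative, not the library's implementation):

```py
from typing import Any

class ConfiguredModel:
    """Illustrative stand-in for a model that keeps generation kwargs from __init__."""

    def __init__(self, model_id: str, **default_kwargs: Any) -> None:
        self.model_id = model_id
        self.default_kwargs = default_kwargs  # e.g. temperature, max_tokens

    def __call__(self, messages: list[dict], **overrides: Any) -> dict:
        # Assumption for this sketch: per-call overrides take precedence over defaults.
        params = {**self.default_kwargs, **overrides}
        return {"model": self.model_id, "messages": messages, **params}

model = ConfiguredModel("anthropic/claude-3-5-sonnet-latest", temperature=0.2, max_tokens=10)
print(model([{"role": "user", "content": "No need to help, take it easy."}]))
```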
@@ -145,12 +145,13 @@ print(model(messages, max_tokens=10))
 ### OpenAiServerModel
 
 This class lets you call any OpenAIServer compatible model.
-Here's how you can set it:
+Here's how you can set it (you can customise the `api_base` url to point to another server):
 ```py
 from smolagents import OpenAIServerModel
 
 model = OpenAIServerModel(
     model_id="gpt-4o",
-    base_url="https://api.openai.com/v1",
+    api_base="https://api.openai.com/v1",
     api_key=os.environ["OPENAI_API_KEY"],
 )
+model=LiteLLMModel("gpt-4o", api_key=os.environ["OPENAI_API_KEY"])
 ```
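Since the constructor argument is now `api_base`, the same class can target any OpenAI-compatible endpoint, not just api.openai.com. A hedged example pointing it at a locally hosted server; the URL, model id, and environment variable below are placeholders, not values from this PR:

```py
import os
from smolagents import OpenAIServerModel

# Placeholder endpoint for an OpenAI-compatible server you run yourself
# (e.g. vLLM or llama.cpp); adjust model_id and api_base to your setup.
model = OpenAIServerModel(
    model_id="my-local-model",
    api_base="http://localhost:8000/v1",
    api_key=os.environ.get("LOCAL_API_KEY", "not-needed"),
)
```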
@@ -173,9 +173,9 @@ Transformers comes with a default toolbox for empowering agents, which you can
 You can manually use a tool by calling the [`load_tool`] function with the task to be performed.
 
 ```python
-from smolagents import load_tool
+from smolagents import DuckDuckGoSearchTool
 
-search_tool = load_tool("web_search")
+search_tool = DuckDuckGoSearchTool()
 print(search_tool("Who's the current president of Russia?"))
 ```
 
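For context on the tool swap above: the search tool can also be handed to an agent instead of being called standalone. A short sketch that only reuses classes already shown elsewhere in this diff (`DuckDuckGoSearchTool`, `CodeAgent`, `HfApiModel`); the question is the same one the doc uses:

```py
from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel

# Let the agent decide when to call the search tool rather than invoking it by hand.
agent = CodeAgent(tools=[DuckDuckGoSearchTool()], model=HfApiModel(), max_steps=4)
agent.run("Who's the current president of Russia?")
```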
@@ -23,5 +23,5 @@ mcp_server_params = StdioServerParameters(
 
 with ToolCollection.from_mcp(mcp_server_params) as tool_collection:
     # print(tool_collection.tools[0](request={"term": "efficient treatment hangover"}))
-    agent = CodeAgent(tools=tool_collection.tools, model=HfApiModel())
-    agent.run("Find studies about hangover?")
+    agent = CodeAgent(tools=tool_collection.tools, model=HfApiModel(), max_steps=4)
+    agent.run("Find me one risk associated with drinking alcohol regularly on low doses for humans.")
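The MCP example above depends on `mcp_server_params`, which is only visible in the hunk header. A self-contained sketch of the full flow; the server command and arguments are placeholder assumptions, not the ones the docs actually configure:

```py
import os
from mcp import StdioServerParameters
from smolagents import CodeAgent, HfApiModel, ToolCollection

# Placeholder MCP server launched over stdio; replace command/args with a real server.
mcp_server_params = StdioServerParameters(
    command="uvx",
    args=["some-mcp-server@latest"],
    env={**os.environ},
)

with ToolCollection.from_mcp(mcp_server_params) as tool_collection:
    agent = CodeAgent(tools=tool_collection.tools, model=HfApiModel(), max_steps=4)
    agent.run("Find me one risk associated with drinking alcohol regularly on low doses for humans.")
```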
@@ -37,7 +37,8 @@ litellm = [
   "litellm>=1.55.10",
 ]
 mcp = [
-  "mcpadapt>=0.0.6"
+  "mcpadapt>=0.0.6",
+  "mcp",
 ]
 openai = [
   "openai>=1.58.1"
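With `mcp` now listed explicitly alongside `mcpadapt`, both packages should be importable once this optional group is installed. A quick check, assuming the group is exposed as the `smolagents[mcp]` extra:

```py
# Assumes installation via: pip install "smolagents[mcp]"
try:
    import mcpadapt  # adapter layer between MCP servers and smolagents tools
    from mcp import StdioServerParameters  # now an explicit dependency as well
except ImportError as err:
    raise SystemExit(f"MCP extra not installed: {err}")

print("MCP extra available:", mcpadapt.__name__, StdioServerParameters.__name__)
```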
@@ -43,7 +43,7 @@ class E2BExecutor:
         # )
         # print("Installation of agents package finished.")
         self.logger = logger
-        additional_imports = additional_imports + ["pickle5"]
+        additional_imports = additional_imports + ["pickle5", "smolagents"]
         if len(additional_imports) > 0:
             execution = self.sbx.commands.run(
                 "pip install " + " ".join(additional_imports)
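To make the executor change concrete: the caller's requested packages are extended with the two always-needed ones and installed in the sandbox with a single pip command. A tiny sketch of just that string-building step, with the sandbox call itself left out:

```py
def build_install_command(additional_imports: list[str]) -> str:
    # Per the diff above, pickle5 and smolagents are always appended.
    packages = additional_imports + ["pickle5", "smolagents"]
    return "pip install " + " ".join(packages)

# Hypothetical caller-supplied imports:
print(build_install_command(["numpy", "pandas"]))
# -> pip install numpy pandas pickle5 smolagents
```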
@@ -85,7 +85,7 @@ class TestDocs:
     def setup_class(cls):
         cls._tmpdir = tempfile.mkdtemp()
         cls.launch_args = ["python3"]
-        cls.docs_dir = Path(__file__).parent.parent / "docs" / "source"
+        cls.docs_dir = Path(__file__).parent.parent / "docs" / "source" / "en"
         cls.extractor = DocCodeExtractor()
 
         if not cls.docs_dir.exists():
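The docs test now only walks the English tree under `docs/source/en`. A standalone sketch of the same path resolution, assuming the doc pages are Markdown files (the glob is illustrative, not taken from the test):

```py
from pathlib import Path

# Resolve docs/source/en relative to this file, as the test fixture now does.
docs_dir = Path(__file__).parent.parent / "docs" / "source" / "en"
if not docs_dir.exists():
    raise FileNotFoundError(f"English docs directory not found: {docs_dir}")

md_files = sorted(docs_dir.rglob("*.md"))  # assumption: doc pages are .md files
print(f"Found {len(md_files)} English doc pages")
```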