Merge pull request #29 from chakib-belgaid/feature/litellm_kwargs
Add support for additional keyword arguments in LiteLLMModel
commit 4cfe4a6573
@@ -424,6 +424,7 @@ class LiteLLMModel(Model):
         model_id="anthropic/claude-3-5-sonnet-20240620",
         api_base=None,
         api_key=None,
+        **kwargs,
     ):
         super().__init__()
         self.model_id = model_id
@@ -431,6 +432,7 @@ class LiteLLMModel(Model):
         litellm.add_function_to_prompt = True
         self.api_base = api_base
         self.api_key = api_key
+        self.kwargs = kwargs

     def __call__(
         self,
@@ -450,6 +452,7 @@ class LiteLLMModel(Model):
             max_tokens=max_tokens,
             api_base=self.api_base,
             api_key=self.api_key,
+            **self.kwargs,
         )
         self.last_input_token_count = response.usage.prompt_tokens
         self.last_output_token_count = response.usage.completion_tokens
@@ -474,6 +477,7 @@ class LiteLLMModel(Model):
             max_tokens=max_tokens,
             api_base=self.api_base,
             api_key=self.api_key,
+            **self.kwargs,
         )
         tool_calls = response.choices[0].message.tool_calls[0]
         self.last_input_token_count = response.usage.prompt_tokens
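With this change, any extra keyword arguments passed to the LiteLLMModel constructor are stored in self.kwargs and forwarded to every litellm.completion call. A minimal usage sketch follows; the import path and the temperature/top_p values are illustrative assumptions, not taken from this diff:

from smolagents import LiteLLMModel

# Extra keyword arguments are captured by **kwargs in __init__ and
# replayed on each litellm.completion(...) call via **self.kwargs.
model = LiteLLMModel(
    model_id="anthropic/claude-3-5-sonnet-20240620",
    api_key="sk-...",   # placeholder value
    temperature=0.7,    # illustrative extra kwarg forwarded to litellm
    top_p=0.9,          # illustrative extra kwarg forwarded to litellm
)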