Fix Ollama integration by forcing flat messages (#406)
This commit is contained in:
parent 183869de04 · commit 6d72ea7283
@@ -690,6 +690,7 @@ class LiteLLMModel(Model):
             api_base=self.api_base,
             api_key=self.api_key,
             convert_images_to_image_urls=True,
+            flatten_messages_as_text=self.model_id.startswith("ollama"),
             custom_role_conversions=self.custom_role_conversions,
             **kwargs,
         )
		Loading…
	
		Reference in New Issue