From 7ea3874fbfef0e724640e2be92e3fa045aec4acd Mon Sep 17 00:00:00 2001
From: killian <63927363+KillianLucas@users.noreply.github.com>
Date: Wed, 15 Nov 2023 19:26:32 -0800
Subject: [PATCH] Fixed `vision` function_calling error for Python package

Former-commit-id: a60f7de15611addb5b6e2f37e23795354094c15e
Former-commit-id: 52971352e3d55e3108703b14114f3c1c7138bd34
Former-commit-id: 7c0390693517944e9f91d120950ef0f0f24b2751 [formerly 06c3c774930dfc0d6d7b27342073c5530571f02c]
Former-commit-id: 6b7398639353a3b7f320c0e9f9b08f619d5dfcd8
---
 interpreter/llm/setup_llm.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/interpreter/llm/setup_llm.py b/interpreter/llm/setup_llm.py
index a2c774ea27..3f527a5c1b 100644
--- a/interpreter/llm/setup_llm.py
+++ b/interpreter/llm/setup_llm.py
@@ -6,6 +6,7 @@ from .setup_openai_coding_llm import setup_openai_coding_llm
 from .setup_text_llm import setup_text_llm
 
+
 def setup_llm(interpreter):
     """
     Takes an Interpreter (which includes a ton of LLM settings),
@@ -15,7 +16,8 @@ def setup_llm(interpreter):
     # Detect whether or not it's a function calling LLM
     if interpreter.function_calling_llm == None:
         if not interpreter.local and (
-            interpreter.model in litellm.open_ai_chat_completion_models
+            interpreter.model != "gpt-4-vision-preview"
+            and interpreter.model in litellm.open_ai_chat_completion_models
             or interpreter.model.startswith("azure/")
         ):
             interpreter.function_calling_llm = True
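
A minimal sketch of how the patched detection condition reads, for readers skimming the diff. The `Interpreter` stub and the `detect_function_calling` helper below are illustrative stand-ins rather than code from the repository; only the condition inside them comes from this patch, and whether a given model name appears in `litellm.open_ai_chat_completion_models` depends on the installed litellm version.

    # Illustrative sketch only: `Interpreter` is a hypothetical stand-in for
    # the real settings object; the condition mirrors the patched setup_llm logic.
    import litellm


    class Interpreter:
        def __init__(self, model, local=False, function_calling_llm=None):
            self.model = model
            self.local = local
            self.function_calling_llm = function_calling_llm


    def detect_function_calling(interpreter):
        # After the patch, gpt-4-vision-preview is excluded from OpenAI
        # function-calling detection, while azure/ models still qualify
        # through the trailing `or` branch.
        if interpreter.function_calling_llm is None:
            interpreter.function_calling_llm = bool(
                not interpreter.local
                and (
                    interpreter.model != "gpt-4-vision-preview"
                    and interpreter.model in litellm.open_ai_chat_completion_models
                    or interpreter.model.startswith("azure/")
                )
            )
        return interpreter.function_calling_llm


    # Expected results with a litellm version that lists gpt-4 as an OpenAI chat model:
    print(detect_function_calling(Interpreter("gpt-4")))                 # True
    print(detect_function_calling(Interpreter("gpt-4-vision-preview")))  # False
    print(detect_function_calling(Interpreter("azure/gpt-4")))           # True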