Commit 9f7369b

add other ollama visual llms
to squash w/ first
1 parent 9cd5dd1 commit 9f7369b

File tree

1 file changed: +8 -6 lines


operate/models/apis.py (+8 -6)
```diff
@@ -52,8 +52,8 @@ async def get_next_action(model, messages, objective, session_id):
         return "coming soon"
     if model == "gemini-pro-vision":
         return call_gemini_pro_vision(messages, objective), None
-    if model == "llava":
-        operation = call_ollama_llava(messages)
+    if model == "llava" or model == "llava:13b" or "bakllava" or "llava-llama3":
+        operation = call_ollama_llava(messages, model)
         return operation, None
     if model == "claude-3":
         operation = await call_claude_3_with_ocr(messages, objective, model)
```
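One thing to note in the new condition: chaining `or` with the bare string literals `"bakllava"` and `"llava-llama3"` makes the whole expression truthy for every model name, since a non-empty string is always true in Python. A membership test would restrict the branch to the intended Ollama vision models; the helper below is only a hypothetical sketch, not part of the commit:

```python
# Hypothetical sketch: bare strings such as "bakllava" are always truthy, so
# the chained `or` above matches any model name. A membership check keeps the
# branch limited to the Ollama vision models named in this commit.
OLLAMA_VISION_MODELS = ("llava", "llava:13b", "bakllava", "llava-llama3")

def is_ollama_vision_model(model: str) -> bool:
    return model in OLLAMA_VISION_MODELS

assert is_ollama_vision_model("llava:13b")
assert not is_ollama_vision_model("gpt-4")
```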
```diff
@@ -453,9 +453,11 @@ async def call_gpt_4_vision_preview_labeled(messages, objective, model):
         return call_gpt_4_vision_preview(messages)


-def call_ollama_llava(messages):
+def call_ollama_llava(messages, model):
+    if model == "":
+        model = "llava"
     if config.verbose:
-        print("[call_ollama_llava]")
+        print(f"[call_ollama_llava] model {model}")
     time.sleep(1)
     try:
         screenshots_dir = "screenshots"
```
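The two added lines give `call_ollama_llava` a fallback: an empty model name resolves to `llava`. A standalone sketch of that behaviour follows; the `resolve_ollama_model` helper is hypothetical, and a default value in the signature (`def call_ollama_llava(messages, model="llava")`) would be an equivalent way to express it:

```python
# Hypothetical helper mirroring the fallback added above: an empty model name
# resolves to "llava", any other name passes through unchanged.
def resolve_ollama_model(model: str = "") -> str:
    return model if model else "llava"

assert resolve_ollama_model("") == "llava"
assert resolve_ollama_model("llava:13b") == "llava:13b"
```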
```diff
@@ -485,7 +487,7 @@ def call_ollama_llava(messages):
         messages.append(vision_message)

         response = ollama.chat(
-            model="llava",
+            model=model,
             messages=messages,
         )

```
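With the model name threaded through, `ollama.chat` now selects whichever vision model the user chose. A minimal standalone sketch of that call pattern, assuming the `ollama` Python package, a locally running Ollama server with the model already pulled, and a placeholder prompt and screenshot path:

```python
# Standalone sketch of the ollama.chat call pattern above. The prompt text and
# screenshot path are placeholders; "bakllava" must already be pulled into a
# locally running Ollama server.
import ollama

messages = [
    {
        "role": "user",
        "content": "Describe what is visible in this screenshot.",
        "images": ["screenshots/screenshot.png"],
    }
]

response = ollama.chat(model="bakllava", messages=messages)
print(response["message"]["content"])
```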
```diff
@@ -527,7 +529,7 @@ def call_ollama_llava(messages):
         )
         if config.verbose:
             traceback.print_exc()
-        return call_ollama_llava(messages)
+        return call_ollama_llava(messages, model)


 async def call_claude_3_with_ocr(messages, objective, model):
```
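The final hunk threads `model` into the retry performed by the `except` branch; that retry is still an unbounded recursive call. A hypothetical sketch of a bounded alternative (the `retry` helper below is not part of the file and assumes the wrapped call raises on failure instead of recursing):

```python
# Hypothetical sketch, not part of this commit: a bounded retry over a
# zero-argument callable, as an alternative to unbounded recursion.
def retry(fn, attempts=3):
    last_exc = None
    for _ in range(attempts):
        try:
            return fn()
        except Exception as exc:  # broad except mirrors the code above
            last_exc = exc
    raise last_exc

# usage sketch: retry(lambda: call_ollama_llava(messages, model))
```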
