Commit 08897d1

Improve error message when Ollama is not installed
1 parent ce08807

3 files changed: +12 −1 lines changed

+llms/+utils/errorMessageCatalog.m (+1)
@@ -77,4 +77,5 @@
     catalog("llms:unsupportedDatatypeInPrototype") = "Invalid data type ''{1}'' in prototype. Prototype must be a struct, composed of numerical, string, logical, categorical, or struct.";
     catalog("llms:incorrectResponseFormat") = "Invalid response format. Response format must be ""text"", ""json"", a struct, or a string with a JSON Schema definition.";
     catalog("llms:OllamaStructuredOutputNeeds05") = "Structured output is not supported for Ollama version {1}. Use version 0.5.0 or newer.";
+    catalog("llms:noOllamaFound") = "Unable to connect to Ollama. Check that {1} is a valid endpoint and ensure that Ollama is installed and running.";
 end
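
The new catalog entry is a message template: its {1} slot is filled by llms.utils.errorMessageCatalog.getMessage, which the ollamaChat.m change below calls with the configured endpoint. A rough sketch of the intended lookup (illustrative only; the endpoint value here is made up):

    endpoint = "127.0.0.1:11434";   % hypothetical endpoint, not part of the commit
    msg = llms.utils.errorMessageCatalog.getMessage("llms:noOllamaFound", endpoint);
    % msg should then read:
    % "Unable to connect to Ollama. Check that 127.0.0.1:11434 is a valid
    %  endpoint and ensure that Ollama is installed and running."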

ollamaChat.m (+5 −1)
@@ -236,7 +236,7 @@
         streamFun = this.StreamFun;
     end
 
-    try % just for nicer errors, reducing the stack depth shown
+    try
         [text, message, response] = llms.internal.callOllamaChatAPI(...
             nvp.ModelName, messagesStruct, this.FunctionsStruct, ...
             Temperature=nvp.Temperature, ToolChoice=toolChoice, ...
@@ -247,6 +247,10 @@
             TimeOut=nvp.TimeOut, StreamFun=streamFun, ...
             Endpoint=nvp.Endpoint);
     catch e
+        if e.identifier == "MATLAB:webservices:ConnectionRefused"
+            error("llms:noOllamaFound",llms.utils.errorMessageCatalog.getMessage("llms:noOllamaFound",nvp.Endpoint));
+        end
+        % for nicer errors, throw instead of rethrow, reducing the stack depth shown
         throw(e);
     end
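
With this change, a refused connection surfaces as llms:noOllamaFound instead of the raw MATLAB:webservices:ConnectionRefused error; any other failure is still re-thrown (throw rather than rethrow, keeping the reported stack shallow). A sketch of the resulting user-facing behavior, reusing the endpoint from the test below (assumes nothing is listening on that port):

    chat = ollamaChat("mistral", Endpoint="127.0.0.1:11433");
    try
        generate(chat, "hi!");
    catch err
        disp(err.identifier)   % expected: llms:noOllamaFound
        disp(err.message)      % names the endpoint and suggests installing/starting Ollama
    end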

tests/tollamaChat.m (+6)
@@ -277,6 +277,12 @@ function doReturnErrors(testCase)
     testCase.verifyError(@() generate(chat,"hi!"), "llms:apiReturnedError");
 end
 
+function errorNoOllamaServer(testCase)
+    % we expect no server running on this port
+    chat = ollamaChat("mistral",Endpoint="127.0.0.1:11433");
+    testCase.verifyError(@() generate(chat,"hi!"), "llms:noOllamaFound");
+end
+
 function invalidInputsConstructor(testCase, InvalidConstructorInput)
     testCase.verifyError(@() ollamaChat(testCase.defaultModelName, InvalidConstructorInput.Input{:}), InvalidConstructorInput.Error);
 end
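
The new test assumes that nothing is listening on port 11433 on the local machine. One possible way to run just this test (a sketch; exact selector support depends on the MATLAB release):

    results = runtests("tests/tollamaChat.m", ProcedureName="errorNoOllamaServer");
    disp(results)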
