Commit d3e7389

Merge pull request #18 from matlab-deep-learning/dev-update-040924models

Reflecting the updated models released on April 9, 2024

2 parents 2ef309d + e9acd81, commit d3e7389

10 files changed: +67 -50 lines
`+llms/+internal/callOpenAIChatAPI.m` (+13 -9)

```diff
@@ -119,18 +119,16 @@
 
 parameters.stream = ~isempty(nvp.StreamFun);
 
-if ~isempty(functions) && ~strcmp(nvp.ModelName,'gpt-4-vision-preview')
+if ~isempty(functions)
     parameters.tools = functions;
 end
 
-if ~isempty(nvp.ToolChoice) && ~strcmp(nvp.ModelName,'gpt-4-vision-preview')
+if ~isempty(nvp.ToolChoice)
     parameters.tool_choice = nvp.ToolChoice;
 end
 
-if ismember(nvp.ModelName,["gpt-3.5-turbo-1106","gpt-4-1106-preview"])
-    if strcmp(nvp.ResponseFormat,"json")
-        parameters.response_format = struct('type','json_object');
-    end
+if strcmp(nvp.ResponseFormat,"json")
+    parameters.response_format = struct('type','json_object');
 end
 
 if ~isempty(nvp.Seed)
@@ -142,15 +140,21 @@
 dict = mapNVPToParameters;
 
 nvpOptions = keys(dict);
-if strcmp(nvp.ModelName,'gpt-4-vision-preview')
-    nvpOptions(ismember(nvpOptions,"StopSequences")) = [];
-end
 
 for opt = nvpOptions.'
     if isfield(nvp, opt)
         parameters.(dict(opt)) = nvp.(opt);
    end
 end
+
+if isempty(nvp.StopSequences)
+    parameters = rmfield(parameters,"stop");
+end
+
+if nvp.MaxNumTokens == Inf
+    parameters = rmfield(parameters,"max_tokens");
+end
+
 end
 
 function dict = mapNVPToParameters()
```

`+llms/+utils/errorMessageCatalog.m` (+2)

```diff
@@ -49,8 +49,10 @@
 catalog("llms:mustBeMessagesOrTxt") = "Messages must be text with one or more characters or an openAIMessages objects.";
 catalog("llms:invalidOptionAndValueForModel") = "'{1}' with value '{2}' is not supported for ModelName '{3}'";
 catalog("llms:invalidOptionForModel") = "{1} is not supported for ModelName '{2}'";
+catalog("llms:invalidContentTypeForModel") = "{1} is not supported for ModelName '{2}'";
 catalog("llms:functionNotAvailableForModel") = "This function is not supported for ModelName '{1}'";
 catalog("llms:promptLimitCharacter") = "Prompt must have a maximum length of {1} characters for ModelName '{2}'";
 catalog("llms:pngExpected") = "Argument must be a PNG image.";
 catalog("llms:warningJsonInstruction") = "When using JSON mode, you must also prompt the model to produce JSON yourself via a system or user message.";
+catalog("llms:apiReturnedError") = "OpenAI API Error: {1}";
 end
```
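
Both new catalog entries are consumed through `llms.utils.errorMessageCatalog.getMessage`, which substitutes the `{1}`/`{2}` placeholders. A short sketch with made-up inputs, assuming the repository is on the MATLAB path:

```matlab
% Made-up server-side message, formatted via the new "llms:apiReturnedError" entry.
apiErr = "The requested model is currently overloaded";
txt = llms.utils.errorMessageCatalog.getMessage("llms:apiReturnedError", apiErr);
disp(txt)   % OpenAI API Error: The requested model is currently overloaded

% "llms:invalidContentTypeForModel" fills in the content type and model name.
msg = llms.utils.errorMessageCatalog.getMessage("llms:invalidContentTypeForModel", ...
    "Image content", "gpt-3.5-turbo");
disp(msg)   % Image content is not supported for ModelName 'gpt-3.5-turbo'
```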

README.md (+14 -14)

````diff
@@ -5,11 +5,11 @@ This repository contains example code to demonstrate how to connect MATLAB to th
 The functionality shown here serves as an interface to the ChatGPT and DALL·E APIs. To start using the OpenAI APIs, you first need to obtain OpenAI API keys. You are responsible for any fees OpenAI may charge for the use of their APIs. You should be familiar with the limitations and risks associated with using this technology, and you agree that you shall be solely responsible for full compliance with any terms that may apply to your use of the OpenAI APIs.
 
 Some of the current LLMs supported are:
-- gpt-3.5-turbo, gpt-3.5-turbo-1106
-- gpt-4, gpt-4-1106-preview
-- gpt-4-vision-preview (a.k.a. GPT-4 Turbo with Vision)
+- gpt-3.5-turbo, gpt-3.5-turbo-1106, gpt-3.5-turbo-0125
+- gpt-4-turbo, gpt-4-turbo-2024-04-09 (GPT-4 Turbo with Vision)
+- gpt-4, gpt-4-0613
 - dall-e-2, dall-e-3
-
+
 For details on the specification of each model, check the official [OpenAI documentation](https://platform.openai.com/docs/models).
 
 ## Requirements
@@ -52,15 +52,15 @@ To use this repository with a local installation of MATLAB, first clone the repo
 
 Set up your OpenAI API key. Create a `.env` file in the project root directory with the following content.
 
-```
-OPENAI_API_KEY=<your key>
-```
+```
+OPENAI_API_KEY=<your key>
+```
 
-Then load your `.env` file as follows:
+Then load your `.env` file as follows:
 
-```matlab
-loadenv(".env")
-```
+```matlab
+loadenv(".env")
+```
 
 ## Getting Started with Chat Completion API
 
@@ -287,13 +287,13 @@ You can extract the arguments and write the data to a table, for example.
 
 ### Understand the content of an image
 
-You can use gpt-4-vision-preview to experiment with image understanding.
+You can use gpt-4-turbo to experiment with image understanding.
 ```matlab
-chat = openAIChat("You are an AI assistant.", ModelName="gpt-4-vision-preview");
+chat = openAIChat("You are an AI assistant.", ModelName="gpt-4-turbo");
 image_path = "peppers.png";
 messages = openAIMessages;
 messages = addUserMessageWithImages(messages,"What is in the image?",image_path);
-[txt,response] = generate(chat,messages);
+[txt,response] = generate(chat,messages,MaxNumTokens=4096);
 % Should output the description of the image
 ```
 
````
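
For context, a minimal usage sketch consistent with the updated model list; the API key is a placeholder and must be replaced with a real one (or loaded from a `.env` file as the README describes):

```matlab
% Construct a chat object with one of the newly supported models and
% generate a short reply. Replace the placeholder key before running.
chat = openAIChat("You are a helpful assistant.", ...
    ModelName="gpt-4-turbo", ApiKey="sk-your-key-here");
txt = generate(chat, "Say hello in one sentence.");
disp(txt)
```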
Binary files changed (contents not shown): `examples/UsingDALLEToEditImages.mlx` and four other files whose names are not shown in this view. Size changes listed: 21 Bytes, -197 Bytes, -3.19 MB, -1.21 MB.

openAIChat.m (+22 -25)

```diff
@@ -114,10 +114,10 @@
 arguments
     systemPrompt {llms.utils.mustBeTextOrEmpty} = []
     nvp.Tools (1,:) {mustBeA(nvp.Tools, "openAIFunction")} = openAIFunction.empty
-    nvp.ModelName (1,1) {mustBeMember(nvp.ModelName,["gpt-4", "gpt-4-0613", "gpt-4-32k", ...
-        "gpt-3.5-turbo", "gpt-4-1106-preview", ...
-        "gpt-3.5-turbo-1106", "gpt-4-vision-preview", ...
-        "gpt-4-turbo-preview"])} = "gpt-3.5-turbo"
+    nvp.ModelName (1,1) {mustBeMember(nvp.ModelName,["gpt-4-turbo", ...
+        "gpt-4-turbo-2024-04-09","gpt-4","gpt-4-0613", ...
+        "gpt-3.5-turbo","gpt-3.5-turbo-0125", ...
+        "gpt-3.5-turbo-1106"])} = "gpt-3.5-turbo"
     nvp.Temperature {mustBeValidTemperature} = 1
     nvp.TopProbabilityMass {mustBeValidTopP} = 1
     nvp.StopSequences {mustBeValidStop} = {}
@@ -131,10 +131,6 @@
 
 if isfield(nvp,"StreamFun")
     this.StreamFun = nvp.StreamFun;
-    if strcmp(nvp.ModelName,'gpt-4-vision-preview')
-        error("llms:invalidOptionForModel", ...
-            llms.utils.errorMessageCatalog.getMessage("llms:invalidOptionForModel", "StreamFun", nvp.ModelName));
-    end
 else
     this.StreamFun = [];
 end
@@ -146,10 +142,6 @@
 else
     this.Tools = nvp.Tools;
     [this.FunctionsStruct, this.FunctionNames] = functionAsStruct(nvp.Tools);
-    if strcmp(nvp.ModelName,'gpt-4-vision-preview')
-        error("llms:invalidOptionForModel", ...
-            llms.utils.errorMessageCatalog.getMessage("llms:invalidOptionForModel", "Tools", nvp.ModelName));
-    end
 end
 
 if ~isempty(systemPrompt)
@@ -163,20 +155,15 @@
 this.Temperature = nvp.Temperature;
 this.TopProbabilityMass = nvp.TopProbabilityMass;
 this.StopSequences = nvp.StopSequences;
-if ~isempty(nvp.StopSequences) && strcmp(nvp.ModelName,'gpt-4-vision-preview')
-    error("llms:invalidOptionForModel", ...
-        llms.utils.errorMessageCatalog.getMessage("llms:invalidOptionForModel", "StopSequences", nvp.ModelName));
-end
-
 
 % ResponseFormat is only supported in the latest models only
 if (nvp.ResponseFormat == "json")
-    if ismember(this.ModelName,["gpt-3.5-turbo-1106","gpt-4-1106-preview"])
-        warning("llms:warningJsonInstruction", ...
-            llms.utils.errorMessageCatalog.getMessage("llms:warningJsonInstruction"))
-    else
+    if ismember(this.ModelName,["gpt-4","gpt-4-0613"])
         error("llms:invalidOptionAndValueForModel", ...
             llms.utils.errorMessageCatalog.getMessage("llms:invalidOptionAndValueForModel", "ResponseFormat", "json", this.ModelName));
+    else
+        warning("llms:warningJsonInstruction", ...
+            llms.utils.errorMessageCatalog.getMessage("llms:warningJsonInstruction"))
     end
 
 end
@@ -222,17 +209,20 @@
 end
 
 toolChoice = convertToolChoice(this, nvp.ToolChoice);
-if ~isempty(nvp.ToolChoice) && strcmp(this.ModelName,'gpt-4-vision-preview')
-    error("llms:invalidOptionForModel", ...
-        llms.utils.errorMessageCatalog.getMessage("llms:invalidOptionForModel", "ToolChoice", this.ModelName));
-end
 
 if isstring(messages) && isscalar(messages)
     messagesStruct = {struct("role", "user", "content", messages)};
 else
     messagesStruct = messages.Messages;
 end
 
+if iscell(messagesStruct{end}.content) && any(cellfun(@(x) isfield(x,"image_url"), messagesStruct{end}.content))
+    if ~ismember(this.ModelName,["gpt-4-turbo","gpt-4-turbo-2024-04-09"])
+        error("llms:invalidContentTypeForModel", ...
+            llms.utils.errorMessageCatalog.getMessage("llms:invalidContentTypeForModel", "Image content", this.ModelName));
+    end
+end
+
 if ~isempty(this.SystemPrompt)
     messagesStruct = horzcat(this.SystemPrompt, messagesStruct);
 end
@@ -244,6 +234,13 @@
     PresencePenalty=this.PresencePenalty, FrequencyPenalty=this.FrequencyPenalty, ...
     ResponseFormat=this.ResponseFormat,Seed=nvp.Seed, ...
     ApiKey=this.ApiKey,TimeOut=this.TimeOut, StreamFun=this.StreamFun);
+
+if isfield(response.Body.Data,"error")
+    err = response.Body.Data.error.message;
+    text = llms.utils.errorMessageCatalog.getMessage("llms:apiReturnedError",err);
+    message = struct("role","assistant","content",text);
+end
+
 end
 
 function this = set.Temperature(this, temperature)
```
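
From the caller's side, the new guard rejects image content for models outside the vision-capable list before any request is sent. A sketch mirroring the new test added below (placeholder API key; the image file is assumed to be on the MATLAB path, as in the repo's tests):

```matlab
% Placeholder key; gpt-3.5-turbo is not in the vision-capable list, so
% generate is expected to throw "llms:invalidContentTypeForModel".
chat = openAIChat("You are an AI assistant.", ...
    ModelName="gpt-3.5-turbo", ApiKey="this-is-not-a-real-key");
messages = openAIMessages;
messages = addUserMessageWithImages(messages, "What is in the image?", "peppers.png");
try
    generate(chat, messages);
catch err
    disp(err.identifier)   % expected: llms:invalidContentTypeForModel
end
```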

tests/topenAIChat.m (+16 -2)

```diff
@@ -38,7 +38,7 @@ function generateAcceptsMessagesAsInput(testCase)
         end
 
         function constructMdlWithInvalidParameters(testCase)
-            testCase.verifyError(@()openAIChat(ApiKey="this-is-not-a-real-key", ResponseFormat="json"), "llms:invalidOptionAndValueForModel");
+            testCase.verifyError(@()openAIChat(ApiKey="this-is-not-a-real-key", ModelName="gpt-4", ResponseFormat="json"), "llms:invalidOptionAndValueForModel");
         end
 
         function keyNotFound(testCase)
@@ -100,7 +100,21 @@ function assignValueToProperty(property, value)
             end
 
             testCase.verifyError(@()assignValueToProperty(InvalidValuesSetters.Property,InvalidValuesSetters.Value), InvalidValuesSetters.Error);
-        end
+        end
+
+        function invalidGenerateInputforModel(testCase)
+            chat = openAIChat(ApiKey="this-is-not-a-real-key");
+            image_path = "peppers.png";
+            emptyMessages = openAIMessages;
+            inValidMessages = addUserMessageWithImages(emptyMessages,"What is in the image?",image_path);
+            testCase.verifyError(@()generate(chat,inValidMessages), "llms:invalidContentTypeForModel")
+        end
+
+        function noStopSequencesNoMaxNumTokens(testCase)
+            chat = openAIChat(ApiKey="this-is-not-a-real-key");
+            testCase.verifyWarningFree(@()generate(chat,"This is okay"));
+        end
+
     end
 end
 
```
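
These additions can be exercised locally with MATLAB's unit test runner; a sketch, assuming the repository root is the current folder (the new negative cases use a placeholder API key):

```matlab
% Run the test suite in the tests folder and summarize the results.
results = runtests("tests");
table(results)
```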
