Skip to content

Commit 00e78ba

Browse files
Cheuk Lun Ko and ccreutzi
authored and committed
Remove hardcoded model support checks for OpenAI
OpenAI has started changing the support of features between model releases, without making announcements. It is not feasible for us to keep updating the list of which model supports what, and the end-user benefit is rather limited in the first place. Thus, we now rely on just forwarding the server errors, the same as we've always done for Azure and Ollama.
1 parent e978dc5 commit 00e78ba

File tree

4 files changed

+1
-45
lines changed

4 files changed

+1
-45
lines changed

+llms/+utils/errorMessageCatalog.m

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -61,15 +61,12 @@
6161
catalog("llms:mustSetFunctionsForCall") = "When no functions are defined, ToolChoice must not be specified.";
6262
catalog("llms:mustBeMessagesOrTxt") = "Message must be nonempty string, character array, cell array of character vectors, or messageHistory object.";
6363
catalog("llms:invalidOptionAndValueForModel") = "'{1}' with value '{2}' is not supported for model ""{3}"".";
64-
catalog("llms:noStructuredOutputForModel") = "Structured output is not supported for model ""{1}"".";
6564
catalog("llms:noStructuredOutputForAzureDeployment") = "Structured output is not supported for deployment ""{1}"".";
6665
catalog("llms:structuredOutputRequiresAPI") = "Structured output is not supported for API version ""{1}"". Use APIVersion=""2024-08-01-preview"" or newer.";
6766
catalog("llms:invalidOptionForModel") = "Invalid argument name {1} for model ""{2}"".";
68-
catalog("llms:invalidContentTypeForModel") = "{1} is not supported for model ""{2}"".";
6967
catalog("llms:functionNotAvailableForModel") = "Image editing is not supported for model ""{1}"".";
7068
catalog("llms:promptLimitCharacter") = "Prompt must contain at most {1} characters for model ""{2}"".";
7169
catalog("llms:pngExpected") = "Image must be a PNG file (*.png).";
72-
catalog("llms:warningJsonInstruction") = "When using JSON mode, you must also prompt the model to produce JSON yourself via a system or user message.";
7370
catalog("llms:apiReturnedError") = "Server returned error indicating: ""{1}""";
7471
catalog("llms:apiReturnedIncompleteJSON") = "Generated output is not valid JSON: ""{1}""";
7572
catalog("llms:dimensionsMustBeSmallerThan") = "Dimensions must be less than or equal to {1}.";

openAIChat.m

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -135,11 +135,7 @@
135135
this.Temperature = nvp.Temperature;
136136
this.TopP = nvp.TopP;
137137
this.StopSequences = nvp.StopSequences;
138-
139-
% ResponseFormat is only supported in the latest models only
140-
llms.openai.validateResponseFormat(nvp.ResponseFormat, this.ModelName);
141138
this.ResponseFormat = nvp.ResponseFormat;
142-
143139
this.PresencePenalty = nvp.PresencePenalty;
144140
this.FrequencyPenalty = nvp.FrequencyPenalty;
145141
this.APIKey = llms.internal.getApiKeyFromNvpOrEnv(nvp,"OPENAI_API_KEY");
@@ -239,13 +235,10 @@
239235
messagesStruct = this.encodeImages(messages.Messages);
240236
end
241237

242-
llms.openai.validateMessageSupported(messagesStruct{end}, nvp.ModelName);
243238
if ~isempty(this.SystemPrompt)
244239
messagesStruct = horzcat(this.SystemPrompt, messagesStruct);
245240
end
246241

247-
llms.openai.validateResponseFormat(nvp.ResponseFormat, nvp.ModelName, messagesStruct);
248-
249242
if isfield(nvp,"StreamFun")
250243
streamFun = nvp.StreamFun;
251244
else

tests/hopenAIChat.m

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -196,12 +196,6 @@ function createChatWithStreamFunc(testCase)
196196
testCase.verifyGreaterThan(numel(sf("")), 1);
197197
end
198198

199-
function errorJSONResponseFormat(testCase)
200-
testCase.verifyError( ...
201-
@() generate(testCase.structuredModel,"create some address",ResponseFormat="json"), ...
202-
"llms:warningJsonInstruction");
203-
end
204-
205199
function jsonFormatWithPrompt(testCase)
206200
testCase.verifyClass( ...
207201
generate(testCase.structuredModel,"create some address, return json",ResponseFormat="json"), ...

tests/topenAIChat.m

Lines changed: 1 addition & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,6 @@
1616
defaultModel = openAIChat;
1717
visionModel = openAIChat;
1818
structuredModel = openAIChat;
19-
noStructuredOutputModel = openAIChat(ModelName="gpt-3.5-turbo");
2019
gpt35Model = openAIChat(ModelName="gpt-3.5-turbo");
2120
end
2221

@@ -78,15 +77,6 @@ function canUseModel(testCase,ModelName)
7877
"string");
7978
end
8079

81-
function gpt35TurboErrorsForImages(testCase)
82-
chat = openAIChat(APIKey="this-is-not-a-real-key",ModelName="gpt-3.5-turbo");
83-
image_path = "peppers.png";
84-
emptyMessages = messageHistory;
85-
inValidMessages = addUserMessageWithImages(emptyMessages,"What is in the image?",image_path);
86-
87-
testCase.verifyError(@()generate(chat,inValidMessages), "llms:invalidContentTypeForModel")
88-
end
89-
9080
function jsonFormatWithSystemPrompt(testCase)
9181
chat = openAIChat("Respond in JSON format.");
9282
testCase.verifyClass( ...
@@ -101,13 +91,6 @@ function doReturnErrors(testCase)
10191
wayTooLong = string(repmat('a ',1,20000));
10292
testCase.verifyError(@() generate(chat,wayTooLong), "llms:apiReturnedError");
10393
end
104-
105-
function specialErrorForUnsupportedResponseFormat(testCase)
106-
testCase.verifyError(@() generate(testCase.noStructuredOutputModel, ...
107-
"What is the smallest prime?", ...
108-
ResponseFormat=struct("number",1)), ...
109-
"llms:noStructuredOutputForModel");
110-
end
11194
end
11295

11396
methods
@@ -410,14 +393,6 @@ function specialErrorForUnsupportedResponseFormat(testCase)
410393
"Input",{{"ResponseFormat", ["text" "text"] }},...
411394
"Error", "MATLAB:validators:mustBeTextScalar"), ...
412395
...
413-
"InvalidResponseFormatModelCombination", struct( ...
414-
"Input", {{"APIKey", "this-is-not-a-real-key", "ModelName", "gpt-4", "ResponseFormat", "json"}}, ...
415-
"Error", "llms:invalidOptionAndValueForModel"), ...
416-
...
417-
"InvalidResponseFormatModelCombination2", struct( ...
418-
"Input", {{"APIKey", "this-is-not-a-real-key", "ModelName", "o1-mini", "ResponseFormat", "json"}}, ...
419-
"Error", "llms:invalidOptionAndValueForModel"), ...
420-
...
421396
"InvalidStreamFunType", struct( ...
422397
"Input",{{"StreamFun", "2" }},...
423398
"Error", "MATLAB:validators:mustBeA"), ...
@@ -548,10 +523,7 @@ function specialErrorForUnsupportedResponseFormat(testCase)
548523
...
549524
"InvalidApiKeySize",struct( ...
550525
"Input",{{ "APIKey" ["abc" "abc"] }},...
551-
"Error","MATLAB:validators:mustBeTextScalar"),...
552-
"StructuredOutputForWrongModel",struct( ...
553-
"Input",{{ "ModelName" "gpt-3.5-turbo" "ResponseFormat" struct("a", 1)}},...
554-
"Error","llms:noStructuredOutputForModel"));
526+
"Error","MATLAB:validators:mustBeTextScalar"));
555527
end
556528

557529
function invalidGenerateInput = iGetInvalidGenerateInput()

0 commit comments

Comments
 (0)